[ 517.369056] env[67015]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_linux_bridge.linux_bridge.LinuxBridgePlugin'>' with name 'linux_bridge' {{(pid=67015) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 517.369406] env[67015]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_noop.noop.NoOpPlugin'>' with name 'noop' {{(pid=67015) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 517.369532] env[67015]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_ovs.ovs.OvsPlugin'>' with name 'ovs' {{(pid=67015) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 517.369823] env[67015]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 517.457295] env[67015]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=67015) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 517.468853] env[67015]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.011s {{(pid=67015) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 517.604746] env[67015]: INFO nova.virt.driver [None req-4beebbde-6bd3-4326-b30d-9c227e4a506f None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 517.678225] env[67015]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 517.678386] env[67015]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 517.678490] env[67015]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=67015) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 520.832326] env[67015]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-980206d8-3c66-4310-93e6-9706b7c8e87d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 520.848380] env[67015]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=67015) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 520.848595] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-dfd94f87-f4a5-4ff2-bf2a-8428bcd73a43 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 520.873764] env[67015]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 6d1a9.
[ 520.874029] env[67015]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.196s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 520.874489] env[67015]: INFO nova.virt.vmwareapi.driver [None req-4beebbde-6bd3-4326-b30d-9c227e4a506f None None] VMware vCenter version: 7.0.3
[ 520.877863] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd7e911-bf44-4e33-9330-2b26a8546139 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 520.894777] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b646853f-d582-4799-8dc7-46c32a49af38 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 520.900444] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fb84504-2391-4f00-953a-9e9fb6fc5d73 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 520.907076] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-175e1bcf-90b8-4c61-9197-eee98046e4d8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 520.919801] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a5c64b3-c1b4-49fc-9939-06994cae8ec4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 520.925589] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aebb44c-04fd-47cd-86f6-bf7b031e7070 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 520.955705] env[67015]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-cadfd574-72fe-4506-8363-0afd9913c35d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 520.960682] env[67015]: DEBUG nova.virt.vmwareapi.driver [None req-4beebbde-6bd3-4326-b30d-9c227e4a506f None None] Extension org.openstack.compute already exists. {{(pid=67015) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 520.963344] env[67015]: INFO nova.compute.provider_config [None req-4beebbde-6bd3-4326-b30d-9c227e4a506f None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 520.981499] env[67015]: DEBUG nova.context [None req-4beebbde-6bd3-4326-b30d-9c227e4a506f None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),2967e1cc-9027-4cd1-a369-64d78c7d58e0(cell1) {{(pid=67015) load_cells /opt/stack/nova/nova/context.py:464}}
[ 520.983365] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 520.983580] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 520.984233] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 520.984645] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] Acquiring lock "2967e1cc-9027-4cd1-a369-64d78c7d58e0" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 520.984841] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] Lock "2967e1cc-9027-4cd1-a369-64d78c7d58e0" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 520.985822] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] Lock "2967e1cc-9027-4cd1-a369-64d78c7d58e0" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 521.005557] env[67015]: INFO dbcounter [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] Registered counter for database nova_cell0
[ 521.013712] env[67015]: INFO dbcounter [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] Registered counter for database nova_cell1
[ 521.016730] env[67015]: DEBUG oslo_db.sqlalchemy.engines [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=67015) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 521.017106] env[67015]: DEBUG oslo_db.sqlalchemy.engines [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=67015) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 521.021582] env[67015]: DEBUG dbcounter [-] [67015] Writer thread running {{(pid=67015) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 521.022315] env[67015]: DEBUG dbcounter [-] [67015] Writer thread running {{(pid=67015) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 521.024405] env[67015]: ERROR nova.db.main.api [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 521.024405] env[67015]:     result = function(*args, **kwargs)
[ 521.024405] env[67015]:   File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 521.024405] env[67015]:     return func(*args, **kwargs)
[ 521.024405] env[67015]:   File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 521.024405] env[67015]:     result = fn(*args, **kwargs)
[ 521.024405] env[67015]:   File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 521.024405] env[67015]:     return f(*args, **kwargs)
[ 521.024405] env[67015]:   File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 521.024405] env[67015]:     return db.service_get_minimum_version(context, binaries)
[ 521.024405] env[67015]:   File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 521.024405] env[67015]:     _check_db_access()
[ 521.024405] env[67015]:   File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 521.024405] env[67015]:     stacktrace = ''.join(traceback.format_stack())
[ 521.024405] env[67015]: 
[ 521.025485] env[67015]: ERROR nova.db.main.api [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 521.025485] env[67015]:     result = function(*args, **kwargs)
[ 521.025485] env[67015]:   File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 521.025485] env[67015]:     return func(*args, **kwargs)
[ 521.025485] env[67015]:   File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 521.025485] env[67015]:     result = fn(*args, **kwargs)
[ 521.025485] env[67015]:   File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 521.025485] env[67015]:     return f(*args, **kwargs)
[ 521.025485] env[67015]:   File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 521.025485] env[67015]:     return db.service_get_minimum_version(context, binaries)
[ 521.025485] env[67015]:   File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 521.025485] env[67015]:     _check_db_access()
[ 521.025485] env[67015]:   File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 521.025485] env[67015]:     stacktrace = ''.join(traceback.format_stack())
[ 521.025485] env[67015]: 
[ 521.025962] env[67015]: WARNING nova.objects.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 521.025995] env[67015]: WARNING nova.objects.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] Failed to get minimum service version for cell 2967e1cc-9027-4cd1-a369-64d78c7d58e0
[ 521.026407] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] Acquiring lock "singleton_lock" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 521.026571] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] Acquired lock "singleton_lock" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 521.026821] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] Releasing lock "singleton_lock" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 521.027151] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] Full set of CONF: {{(pid=67015) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 521.027300] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ******************************************************************************** {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}}
[ 521.027430] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] Configuration options gathered from: {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}}
[ 521.027569] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 521.027760] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}}
[ 521.027890] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ================================================================================ {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}}
[ 521.028115] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] allow_resize_to_same_host = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.028291] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] arq_binding_timeout = 300 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.028426] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] backdoor_port = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.028556] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] backdoor_socket = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.028721] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] block_device_allocate_retries = 60 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.028884] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] block_device_allocate_retries_interval = 3 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.029066] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cert = self.pem {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.029257] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.029432] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] compute_monitors = [] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.029601] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] config_dir = [] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.029774] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] config_drive_format = iso9660 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.029913] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.030100] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] config_source = [] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.030274] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] console_host = devstack {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.030440] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] control_exchange = nova {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.030602] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cpu_allocation_ratio = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.030766] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] daemon = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.030979] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] debug = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.031202] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] default_access_ip_network_name = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.031385] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] default_availability_zone = nova {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.031544] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] default_ephemeral_format = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.031709] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] default_green_pool_size = 1000 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.031946] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.032134] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] default_schedule_zone = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.032299] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] disk_allocation_ratio = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.032483] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] enable_new_services = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.032675] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] enabled_apis = ['osapi_compute'] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.032867] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] enabled_ssl_apis = [] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.033051] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] flat_injected = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.033296] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] force_config_drive = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.033565] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] force_raw_images = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.033842] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] graceful_shutdown_timeout = 5 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.034144] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] heal_instance_info_cache_interval = 60 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.034421] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] host = cpu-1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.034658] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.034862] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] initial_disk_allocation_ratio = 1.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.035111] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] initial_ram_allocation_ratio = 1.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.035385] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.035634] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] instance_build_timeout = 0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.035832] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] instance_delete_interval = 300 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.036027] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] instance_format = [instance: %(uuid)s] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.036194] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] instance_name_template = instance-%08x {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.036364] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] instance_usage_audit = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.036539] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] instance_usage_audit_period = month {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.036710] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.036886] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] instances_path = /opt/stack/data/nova/instances {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.037117] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] internal_service_availability_zone = internal {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.037293] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] key = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.037463] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] live_migration_retry_count = 30 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.037630] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] log_config_append = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.037810] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.037964] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] log_dir = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.038164] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] log_file = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.038317] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] log_options = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.038506] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] log_rotate_interval = 1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.038681] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] log_rotate_interval_type = days {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.038852] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] log_rotation_type = none {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.038988] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.039151] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.039334] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.039506] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.039643] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.039856] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] long_rpc_timeout = 1800 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.040109] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] max_concurrent_builds = 10 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.040354] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] max_concurrent_live_migrations = 1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.040539] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] max_concurrent_snapshots = 5 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.040709] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] max_local_block_devices = 3 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.040880] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] max_logfile_count = 30 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.041159] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] max_logfile_size_mb = 200 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.041224] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] maximum_instance_delete_attempts = 5 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.041399] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] metadata_listen = 0.0.0.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.041594] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] metadata_listen_port = 8775 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.041813] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] metadata_workers = 2 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.041987] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] migrate_max_retries = -1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.042175] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] mkisofs_cmd = genisoimage {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.042392] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] my_block_storage_ip = 10.180.1.21 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.042530] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] my_ip = 10.180.1.21 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.042701] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] network_allocate_retries = 0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.042905] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.043091] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] osapi_compute_listen = 0.0.0.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.043265] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] osapi_compute_listen_port = 8774 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.043441] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] osapi_compute_unique_server_name_scope = {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.043614] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] osapi_compute_workers = 2 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.043781] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] password_length = 12 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.043952] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] periodic_enable = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.044134] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] periodic_fuzzy_delay = 60 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.044311] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] pointer_model = usbtablet {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.044492] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] preallocate_images = none {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.044950] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] publish_errors = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.044950] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] pybasedir = /opt/stack/nova {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.045036] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ram_allocation_ratio = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.045290] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] rate_limit_burst = 0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.045352] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] rate_limit_except_level = CRITICAL {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.045523] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] rate_limit_interval = 0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.045689] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] reboot_timeout = 0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.045864] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] reclaim_instance_interval = 0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.046037] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] record = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.046248] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] reimage_timeout_per_gb = 60 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.046398] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] report_interval = 120 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.046580] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] rescue_timeout = 0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.046743] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] reserved_host_cpus = 0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.046912] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] reserved_host_disk_mb = 0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.047101] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] reserved_host_memory_mb = 512 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.047264] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] reserved_huge_pages = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.047428] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] resize_confirm_window = 0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.047589] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] resize_fs_using_block_device = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.047749] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] resume_guests_state_on_host_boot = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.047919] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.048104] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] rpc_response_timeout = 60 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.048272] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] run_external_periodic_tasks = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.048440] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] running_deleted_instance_action = reap {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.048603] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] running_deleted_instance_poll_interval = 1800 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.048762] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] running_deleted_instance_timeout = 0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.048923] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] scheduler_instance_sync_interval = 120 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.049109] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] service_down_time = 720 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.049282] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] servicegroup_driver = db {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.049444] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] shelved_offload_time = 0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.049611] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] shelved_poll_interval = 3600 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.049828] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] shutdown_timeout = 0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.050013] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] source_is_ipv6 = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.050181] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ssl_only = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.050454] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.050625] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] sync_power_state_interval = 600 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.050790] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] sync_power_state_pool_size = 1000 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.050983] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] syslog_log_facility = LOG_USER {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.051153] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] tempdir = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.051341] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] timeout_nbd = 10 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.051517] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] transport_url = **** {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.051638] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] update_resources_interval = 0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.051803] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] use_cow_images = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.051975] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] use_eventlog = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.052140] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] use_journal = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.052304] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] use_json = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.052464] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] use_rootwrap_daemon = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.052698] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] use_stderr = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.052841] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] use_syslog = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.053010] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vcpu_pin_set = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.053188] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vif_plugging_is_fatal = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.053360] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vif_plugging_timeout = 300 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.053531] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] virt_mkfs = [] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.053699] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] volume_usage_poll_interval = 0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.053863] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] watch_log_file = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.054047] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] web = /usr/share/spice-html5 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 521.054246] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_concurrency.disable_process_locking = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.054554] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.054740] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.054911] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.055098] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.055495] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.055495] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.055663] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api.auth_strategy = keystone {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.055893] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api.compute_link_prefix = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.056161] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.056370] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api.dhcp_domain = novalocal {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.056556] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api.enable_instance_password = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.056785] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api.glance_link_prefix = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.056992] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.057205] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.057377] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api.instance_list_per_project_cells = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.057547] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api.list_records_by_skipping_down_cells = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.057712] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api.local_metadata_per_cell = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.057885] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api.max_limit = 1000 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.058069] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api.metadata_cache_expiration = 15 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.058296] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api.neutron_default_tenant_id = default {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.058487] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api.use_neutron_default_nets = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.058674] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.058845] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.059034] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.059210] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.059417] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api.vendordata_dynamic_targets = [] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.059606] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api.vendordata_jsonfile_path = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.059797] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.060041] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.backend = dogpile.cache.memcached {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.060290] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.backend_argument = **** {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.060476] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.config_prefix = cache.oslo {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.060665] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.dead_timeout = 60.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.060882] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.debug_cache_backend = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.061076] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.enable_retry_client = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.061254] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.enable_socket_keepalive = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.061431] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.enabled = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.061652] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.enforce_fips_mode = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.061763] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.expiration_time = 600 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.061928] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.hashclient_retry_attempts = 2 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.062112] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.hashclient_retry_delay = 1.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.062280] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.memcache_dead_retry = 300 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.062438] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.memcache_password = **** {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.062605] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.062773] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.062938] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.memcache_pool_maxsize = 10 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.063127] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.063359] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.memcache_sasl_enabled = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.063559] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.063734] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.memcache_socket_timeout = 1.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.063899] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.memcache_username = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.064084] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.proxies = [] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.064251] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.redis_password = **** {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.064425] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.redis_sentinel_service_name = mymaster {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.064602] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.064772] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.redis_server = localhost:6379 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.064941] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.redis_socket_timeout = 1.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 521.065118] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.redis_username = None {{(pid=67015) log_opt_values
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.065286] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.retry_attempts = 2 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.065453] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.retry_delay = 0.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.065653] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.socket_keepalive_count = 1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.065779] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.socket_keepalive_idle = 1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.065941] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.socket_keepalive_interval = 1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.066117] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.tls_allowed_ciphers = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.066305] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.tls_cafile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.066469] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.tls_certfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.066633] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.tls_enabled = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.066793] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cache.tls_keyfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.066966] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cinder.auth_section = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.067158] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cinder.auth_type = password {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.067323] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cinder.cafile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.067500] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cinder.catalog_info = volumev3::publicURL {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.067665] env[67015]: DEBUG oslo_service.service 
[None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cinder.certfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.067828] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cinder.collect_timing = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.067991] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cinder.cross_az_attach = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.068170] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cinder.debug = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.068332] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cinder.endpoint_template = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.068495] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cinder.http_retries = 3 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.068659] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cinder.insecure = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.068819] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cinder.keyfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.068991] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cinder.os_region_name = RegionOne {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.069197] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cinder.split_loggers = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.069426] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cinder.timeout = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.069618] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.069785] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] compute.cpu_dedicated_set = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.069947] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] compute.cpu_shared_set = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.070131] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] compute.image_type_exclude_list = [] {{(pid=67015) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.070304] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.070469] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] compute.max_concurrent_disk_ops = 0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.070632] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] compute.max_disk_devices_to_attach = -1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.070798] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.071012] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.071274] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] compute.resource_provider_association_refresh = 300 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.071368] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] compute.shutdown_retry_interval = 10 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.071549] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.071772] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] conductor.workers = 2 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.071924] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] console.allowed_origins = [] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.072114] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] console.ssl_ciphers = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.072292] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] console.ssl_minimum_version = default {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.072466] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] consoleauth.enforce_session_timeout = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.072639] env[67015]: DEBUG oslo_service.service [None 
req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] consoleauth.token_ttl = 600 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.072810] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cyborg.cafile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.072971] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cyborg.certfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.073153] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cyborg.collect_timing = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.073313] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cyborg.connect_retries = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.073472] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cyborg.connect_retry_delay = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.073630] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cyborg.endpoint_override = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.073793] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cyborg.insecure = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.073950] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cyborg.keyfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.074123] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cyborg.max_version = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.074283] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cyborg.min_version = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.074441] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cyborg.region_name = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.074598] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cyborg.retriable_status_codes = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.074755] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cyborg.service_name = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.074940] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cyborg.service_type = accelerator {{(pid=67015) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.075134] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cyborg.split_loggers = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.075300] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cyborg.status_code_retries = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.075461] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cyborg.status_code_retry_delay = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.075620] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cyborg.timeout = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.075801] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.075967] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] cyborg.version = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.076165] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] database.backend = sqlalchemy {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.076337] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] database.connection = **** {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.076506] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] database.connection_debug = 0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.076675] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] database.connection_parameters = {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.076841] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] database.connection_recycle_time = 3600 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.077015] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] database.connection_trace = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.077202] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] database.db_inc_retry_interval = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.077367] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] database.db_max_retries = 20 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
[ 521.077535] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] database.db_max_retry_interval = 10 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.077791] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] database.db_retry_interval = 1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.078501] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] database.max_overflow = 50 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.078501] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] database.max_pool_size = 5 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.078501] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] database.max_retries = 10 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.078624] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.078773] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] database.mysql_wsrep_sync_wait = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.078961] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] database.pool_timeout = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.079160] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] database.retry_interval = 10 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.079430] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] database.slave_connection = **** {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.080862] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] database.sqlite_synchronous = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.080862] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] database.use_db_reconnect = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.080862] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api_database.backend = sqlalchemy {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.080862] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api_database.connection = **** {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.080862] env[67015]: DEBUG oslo_service.service [None 
req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api_database.connection_debug = 0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.080862] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api_database.connection_parameters = {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.081079] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api_database.connection_recycle_time = 3600 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.081079] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api_database.connection_trace = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.081079] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api_database.db_inc_retry_interval = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.081165] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api_database.db_max_retries = 20 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.081289] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api_database.db_max_retry_interval = 10 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.081449] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api_database.db_retry_interval = 1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.081663] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api_database.max_overflow = 50 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.081813] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api_database.max_pool_size = 5 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.081980] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api_database.max_retries = 10 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.082179] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.082376] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.082515] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api_database.pool_timeout = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.082681] env[67015]: DEBUG oslo_service.service [None 
req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api_database.retry_interval = 10 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.082845] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api_database.slave_connection = **** {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.083018] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] api_database.sqlite_synchronous = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.083198] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] devices.enabled_mdev_types = [] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.083379] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.083550] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ephemeral_storage_encryption.default_format = luks {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.083717] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ephemeral_storage_encryption.enabled = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.083882] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.084087] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.api_servers = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.084272] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.cafile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.084440] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.certfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.084608] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.collect_timing = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.084771] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.connect_retries = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.084933] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.connect_retry_delay = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.085114] env[67015]: DEBUG oslo_service.service [None 
req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.debug = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.085281] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.default_trusted_certificate_ids = [] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.085445] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.enable_certificate_validation = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.085610] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.enable_rbd_download = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.085770] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.endpoint_override = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.085938] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.insecure = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.086111] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.keyfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.086274] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.max_version = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.086435] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.min_version = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.086599] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.num_retries = 3 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.086768] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.rbd_ceph_conf = {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.086931] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.rbd_connect_timeout = 5 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.087139] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.rbd_pool = {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.087320] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.rbd_user = {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.087483] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.region_name = None {{(pid=67015) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.087645] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.retriable_status_codes = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.087804] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.service_name = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.087973] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.service_type = image {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.088153] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.split_loggers = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.088315] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.status_code_retries = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.088474] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.status_code_retry_delay = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.088634] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.timeout = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.088815] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.088980] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.verify_glance_signatures = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.089152] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] glance.version = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.089325] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] guestfs.debug = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.089494] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] mks.enabled = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.089872] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.090091] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] image_cache.manager_interval = 2400 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
521.090281] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] image_cache.precache_concurrency = 1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.090457] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] image_cache.remove_unused_base_images = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.090630] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.090801] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.091031] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] image_cache.subdirectory_name = _base {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.091208] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.api_max_retries = 60 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.091380] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.api_retry_interval = 2 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.091545] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.auth_section = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.091766] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.auth_type = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.091919] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.cafile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.092069] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.certfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.092222] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.collect_timing = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.092390] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.conductor_group = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.092553] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.connect_retries = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.092716] env[67015]: DEBUG 
oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.connect_retry_delay = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.092880] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.endpoint_override = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.093063] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.insecure = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.093250] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.keyfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.093419] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.max_version = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.093582] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.min_version = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.093751] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.peer_list = [] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.093914] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.region_name = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.094089] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.retriable_status_codes = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.094258] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.serial_console_state_timeout = 10 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.094418] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.service_name = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.094590] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.service_type = baremetal {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.094752] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.shard = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.094991] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.split_loggers = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.095102] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.status_code_retries = None {{(pid=67015) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.095267] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.status_code_retry_delay = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.095430] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.timeout = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.095611] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.095775] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ironic.version = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.095956] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.096166] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] key_manager.fixed_key = **** {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.096367] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.096535] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican.barbican_api_version = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.096698] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican.barbican_endpoint = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.096871] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican.barbican_endpoint_type = public {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.097047] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican.barbican_region_name = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.097218] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican.cafile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.097382] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican.certfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.097549] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican.collect_timing = False {{(pid=67015) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.097715] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican.insecure = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.097877] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican.keyfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.098057] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican.number_of_retries = 60 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.098229] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican.retry_delay = 1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.098396] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican.send_service_user_token = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.098563] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican.split_loggers = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.098724] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican.timeout = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.098889] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican.verify_ssl = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.099065] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican.verify_ssl_path = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.099264] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican_service_user.auth_section = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.099437] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican_service_user.auth_type = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.099599] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican_service_user.cafile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.099759] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican_service_user.certfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.099926] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican_service_user.collect_timing = False {{(pid=67015) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.100103] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican_service_user.insecure = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.100268] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican_service_user.keyfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.100437] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican_service_user.split_loggers = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.100601] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] barbican_service_user.timeout = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.100773] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vault.approle_role_id = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.100967] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vault.approle_secret_id = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.101151] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vault.cafile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.101313] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vault.certfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.101479] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vault.collect_timing = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.101643] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vault.insecure = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.101808] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vault.keyfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.102019] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vault.kv_mountpoint = secret {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.102189] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vault.kv_path = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.102358] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vault.kv_version = 2 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.102521] env[67015]: DEBUG 
oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vault.namespace = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.102682] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vault.root_token_id = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.102844] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vault.split_loggers = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.103012] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vault.ssl_ca_crt_file = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.103181] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vault.timeout = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.103369] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vault.use_ssl = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.103540] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.103715] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] keystone.auth_section = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.103878] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] keystone.auth_type = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.104060] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] keystone.cafile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.104227] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] keystone.certfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.104393] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] keystone.collect_timing = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.104556] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] keystone.connect_retries = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.104718] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] keystone.connect_retry_delay = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.104878] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] keystone.endpoint_override = None {{(pid=67015) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.105083] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] keystone.insecure = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.105260] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] keystone.keyfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.105418] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] keystone.max_version = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.105579] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] keystone.min_version = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.105741] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] keystone.region_name = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.105903] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] keystone.retriable_status_codes = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.106075] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] keystone.service_name = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.106252] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] keystone.service_type = identity {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.106419] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] keystone.split_loggers = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.106583] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] keystone.status_code_retries = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.106744] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] keystone.status_code_retry_delay = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.106906] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] keystone.timeout = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.107098] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.107268] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] keystone.version = None {{(pid=67015) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.107507] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.connection_uri = {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.107681] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.cpu_mode = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.107852] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.cpu_model_extra_flags = [] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.108055] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.cpu_models = [] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.108246] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.cpu_power_governor_high = performance {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.108421] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.cpu_power_governor_low = powersave {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.108588] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.cpu_power_management = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.108760] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.108928] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.device_detach_attempts = 8 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.109109] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.device_detach_timeout = 20 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.109280] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.disk_cachemodes = [] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.109445] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.disk_prefix = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.109612] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.enabled_perf_events = [] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.109778] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.file_backed_memory = 0 {{(pid=67015) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.109946] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.gid_maps = [] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.110135] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.hw_disk_discard = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.110302] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.hw_machine_type = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.110475] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.images_rbd_ceph_conf = {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.110644] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.110810] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.111037] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.images_rbd_glance_store_name = {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.111237] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.images_rbd_pool = rbd {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.111418] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.images_type = default {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.111584] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.images_volume_group = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.111749] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.inject_key = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.111938] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.inject_partition = -2 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.112134] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.inject_password = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.112306] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.iscsi_iface = None {{(pid=67015) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.112473] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.iser_use_multipath = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.112641] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.live_migration_bandwidth = 0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.112808] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.112974] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.live_migration_downtime = 500 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.113156] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.113324] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.113488] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.live_migration_inbound_addr = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.113651] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.113816] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.live_migration_permit_post_copy = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.113981] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.live_migration_scheme = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.114204] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.live_migration_timeout_action = abort {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.114381] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.live_migration_tunnelled = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.114549] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.live_migration_uri = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.114715] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] 
libvirt.live_migration_with_native_tls = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.114881] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.max_queues = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.115061] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.115297] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.115467] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.nfs_mount_options = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.115771] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.115951] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.116132] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.num_iser_scan_tries = 5 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.116298] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.num_memory_encrypted_guests = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.116466] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.116635] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.num_pcie_ports = 0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.116805] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.num_volume_scan_tries = 5 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.116973] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.pmem_namespaces = [] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.117172] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.quobyte_client_cfg = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.117465] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.117640] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.rbd_connect_timeout = 5 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.117808] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.117973] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.118151] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.rbd_secret_uuid = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.118313] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.rbd_user = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.118477] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.118648] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.remote_filesystem_transport = ssh {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.118811] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.rescue_image_id = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.118974] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.rescue_kernel_id = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.119149] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.rescue_ramdisk_id = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.119320] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.119481] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.rx_queue_size = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.119649] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.smbfs_mount_options = {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.119924] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.120139] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.snapshot_compression = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.120320] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.snapshot_image_format = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.120544] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.120714] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.sparse_logical_volumes = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.120903] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.swtpm_enabled = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.121091] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.swtpm_group = tss {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.121268] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.swtpm_user = tss {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.121440] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.sysinfo_serial = unique {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.121604] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.tb_cache_size = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.121764] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.tx_queue_size = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.121930] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.uid_maps = [] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.122111] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.use_virtio_for_bridges = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.122284] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.virt_type = kvm {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.122455] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.volume_clear = zero 
{{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.122621] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.volume_clear_size = 0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.122792] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.volume_use_multipath = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.122953] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.vzstorage_cache_path = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.123170] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.123360] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.vzstorage_mount_group = qemu {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.123531] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.vzstorage_mount_opts = [] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.123731] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.124025] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.124214] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.vzstorage_mount_user = stack {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.124385] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.124560] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.auth_section = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.124734] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.auth_type = password {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.124900] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.cafile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.125075] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.certfile = None 
{{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.125247] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.collect_timing = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.125411] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.connect_retries = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.125575] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.connect_retry_delay = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.125746] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.default_floating_pool = public {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.125908] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.endpoint_override = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.126090] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.extension_sync_interval = 600 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.126320] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.http_retries = 3 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.126503] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.insecure = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.126670] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.keyfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.126836] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.max_version = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.127015] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.127184] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.min_version = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.127360] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.ovs_bridge = br-int {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.127529] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.physnets = [] {{(pid=67015) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.127701] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.region_name = RegionOne {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.127866] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.retriable_status_codes = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.128051] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.service_metadata_proxy = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.128222] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.service_name = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.128394] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.service_type = network {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.128560] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.split_loggers = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.128721] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.status_code_retries = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.128886] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.status_code_retry_delay = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.129061] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.timeout = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.129271] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.129442] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] neutron.version = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.129619] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] notifications.bdms_in_notifications = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.129798] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] notifications.default_level = INFO {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.129974] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] notifications.notification_format = unversioned {{(pid=67015) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.130159] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] notifications.notify_on_state_change = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.130337] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.130513] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] pci.alias = [] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.130685] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] pci.device_spec = [] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.130853] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] pci.report_in_placement = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.131062] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.auth_section = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.131246] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.auth_type = password {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.131417] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.131583] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.cafile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.131747] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.certfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.131935] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.collect_timing = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.132163] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.connect_retries = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.132293] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.connect_retry_delay = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.132458] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.default_domain_id = None {{(pid=67015) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.132621] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.default_domain_name = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.132785] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.domain_id = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.132947] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.domain_name = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.133125] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.endpoint_override = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.133295] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.insecure = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.133455] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.keyfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.133613] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.max_version = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.133772] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.min_version = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.133941] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.password = **** {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.134119] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.project_domain_id = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.134292] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.project_domain_name = Default {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.134459] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.project_id = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.134633] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.project_name = service {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.134802] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.region_name = RegionOne {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.134984] 
env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.retriable_status_codes = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.135177] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.service_name = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.135353] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.service_type = placement {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.135519] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.split_loggers = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.135682] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.status_code_retries = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.135846] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.status_code_retry_delay = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.136013] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.system_scope = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.136188] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.timeout = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.136349] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.trust_id = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.136510] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.user_domain_id = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.136681] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.user_domain_name = Default {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.136842] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.user_id = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.137032] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.username = placement {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.137225] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.137390] env[67015]: DEBUG oslo_service.service [None 
req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] placement.version = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.137568] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] quota.cores = 20 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.137736] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] quota.count_usage_from_placement = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.137909] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.138125] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] quota.injected_file_content_bytes = 10240 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.138304] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] quota.injected_file_path_length = 255 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.138475] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] quota.injected_files = 5 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.138643] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] quota.instances = 10 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.138814] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] quota.key_pairs = 100 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.138983] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] quota.metadata_items = 128 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.139167] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] quota.ram = 51200 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.139337] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] quota.recheck_quota = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.139507] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] quota.server_group_members = 10 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.139679] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] quota.server_groups = 10 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.139852] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=67015) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.140029] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.140200] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] scheduler.image_metadata_prefilter = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.140363] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.140528] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] scheduler.max_attempts = 3 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.140692] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] scheduler.max_placement_results = 1000 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.140856] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.141060] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] scheduler.query_placement_for_image_type_support = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.141236] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.141414] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] scheduler.workers = 2 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.141593] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.141766] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.141944] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.142133] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.142302] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.142467] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.142633] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.142821] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.142991] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.host_subset_size = 1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.143176] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.143342] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.143506] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.143672] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.isolated_hosts = [] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.143836] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.isolated_images = [] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.143999] env[67015]: DEBUG oslo_service.service [None 
req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.144179] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.144351] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.144517] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.pci_in_placement = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.144680] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.144842] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.145014] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.145187] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.145353] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.145515] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.145675] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.track_instance_changes = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.145851] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.146033] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] metrics.required = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.146206] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] metrics.weight_multiplier = 1.0 
{{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.146371] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.146535] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] metrics.weight_setting = [] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.146847] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.147033] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] serial_console.enabled = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.147219] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] serial_console.port_range = 10000:20000 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.147392] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.147563] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.147735] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] serial_console.serialproxy_port = 6083 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.147906] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] service_user.auth_section = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.148096] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] service_user.auth_type = password {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.148263] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] service_user.cafile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.148426] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] service_user.certfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.148592] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] service_user.collect_timing = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.148757] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] service_user.insecure = False {{(pid=67015) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.148934] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] service_user.keyfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.149136] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] service_user.send_service_user_token = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.149308] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] service_user.split_loggers = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.149470] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] service_user.timeout = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.149641] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] spice.agent_enabled = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.149819] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] spice.enabled = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.150149] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.150350] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.150525] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] spice.html5proxy_port = 6082 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.150692] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] spice.image_compression = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.150853] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] spice.jpeg_compression = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.151053] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] spice.playback_compression = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.151239] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] spice.server_listen = 127.0.0.1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.151412] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=67015) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.151572] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] spice.streaming_mode = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.151732] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] spice.zlib_compression = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.151900] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] upgrade_levels.baseapi = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.152087] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] upgrade_levels.compute = auto {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.152254] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] upgrade_levels.conductor = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.152416] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] upgrade_levels.scheduler = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.152625] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vendordata_dynamic_auth.auth_section = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.152744] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vendordata_dynamic_auth.auth_type = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.152904] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vendordata_dynamic_auth.cafile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.153079] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vendordata_dynamic_auth.certfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.153248] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.153412] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vendordata_dynamic_auth.insecure = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.153573] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vendordata_dynamic_auth.keyfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.153735] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=67015) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.153898] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vendordata_dynamic_auth.timeout = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.154089] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vmware.api_retry_count = 10 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.154255] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vmware.ca_file = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.154430] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vmware.cache_prefix = devstack-image-cache {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.154602] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vmware.cluster_name = testcl1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.154769] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vmware.connection_pool_size = 10 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.154931] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vmware.console_delay_seconds = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.155116] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vmware.datastore_regex = ^datastore.* {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.155647] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.155647] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vmware.host_password = **** {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.155647] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vmware.host_port = 443 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.155821] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vmware.host_username = administrator@vsphere.local {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.155990] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vmware.insecure = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.156174] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vmware.integration_bridge = None {{(pid=67015) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.156343] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vmware.maximum_objects = 100 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.156501] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vmware.pbm_default_policy = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.156667] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vmware.pbm_enabled = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.156830] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vmware.pbm_wsdl_location = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.157007] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.157177] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vmware.serial_port_proxy_uri = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.157339] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vmware.serial_port_service_uri = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.157506] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vmware.task_poll_interval = 0.5 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.157680] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vmware.use_linked_clone = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.157852] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vmware.vnc_keymap = en-us {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.158032] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vmware.vnc_port = 5900 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.158205] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vmware.vnc_port_total = 10000 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.158392] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vnc.auth_schemes = ['none'] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.158568] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vnc.enabled = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.158902] env[67015]: 
DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.159127] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.159309] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vnc.novncproxy_port = 6080 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.159491] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vnc.server_listen = 127.0.0.1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.159664] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.159827] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vnc.vencrypt_ca_certs = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.159988] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vnc.vencrypt_client_cert = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.160171] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vnc.vencrypt_client_key = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.160349] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.160515] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] workarounds.disable_deep_image_inspection = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.160678] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.160841] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.161054] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.161237] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] workarounds.disable_rootwrap = False {{(pid=67015) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.161403] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] workarounds.enable_numa_live_migration = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.161566] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.161728] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.161890] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.162065] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] workarounds.libvirt_disable_apic = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.162230] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.162394] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.162557] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.162719] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.162876] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.163050] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.163215] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.163378] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
521.163540] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.163707] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.163891] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.164077] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] wsgi.client_socket_timeout = 900 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.164247] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] wsgi.default_pool_size = 1000 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.164415] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] wsgi.keep_alive = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.164584] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] wsgi.max_header_line = 16384 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.164747] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] wsgi.secure_proxy_ssl_header = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.164911] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] wsgi.ssl_ca_file = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.165086] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] wsgi.ssl_cert_file = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.165252] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] wsgi.ssl_key_file = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.165420] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] wsgi.tcp_keepidle = 600 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.165597] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.165767] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] zvm.ca_file = None {{(pid=67015) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.165928] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] zvm.cloud_connector_url = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.166227] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.166404] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] zvm.reachable_timeout = 300 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.166586] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_policy.enforce_new_defaults = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.166759] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_policy.enforce_scope = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.166937] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_policy.policy_default_rule = default {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.167135] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.167312] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_policy.policy_file = policy.yaml {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.167486] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.167649] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.167810] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.167969] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.168147] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.168318] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] 
oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.168493] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.168669] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] profiler.connection_string = messaging:// {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.168850] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] profiler.enabled = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.169064] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] profiler.es_doc_type = notification {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.169245] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] profiler.es_scroll_size = 10000 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.169420] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] profiler.es_scroll_time = 2m {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.169585] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] profiler.filter_error_trace = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.169756] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] profiler.hmac_keys = **** {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.169926] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] profiler.sentinel_service_name = mymaster {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.170109] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] profiler.socket_timeout = 0.1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.170278] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] profiler.trace_requests = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.170441] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] profiler.trace_sqlalchemy = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.170619] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] profiler_jaeger.process_tags = {} {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.170782] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] 
profiler_jaeger.service_name_prefix = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.170966] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] profiler_otlp.service_name_prefix = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.171162] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] remote_debug.host = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.171325] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] remote_debug.port = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.171505] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.171668] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.171831] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.171998] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.172172] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.172334] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.172494] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.172657] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.172811] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.172993] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.173187] env[67015]: 
DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.173361] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.173530] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.173701] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.173870] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.174048] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.174219] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.174391] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.174555] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.174715] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.174879] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.175056] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.175221] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.175388] env[67015]: DEBUG oslo_service.service [None 
req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.175552] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.175714] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.175875] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.176048] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.176241] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.176404] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.ssl = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.176577] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.176748] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.176914] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.177098] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.177269] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.ssl_version = {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.177433] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.177625] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=67015) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.177798] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_notifications.retry = -1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.177978] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.178171] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_messaging_notifications.transport_url = **** {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.178348] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_limit.auth_section = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.178516] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_limit.auth_type = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.178680] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_limit.cafile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.178837] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_limit.certfile = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.179047] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_limit.collect_timing = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.179222] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_limit.connect_retries = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.179391] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_limit.connect_retry_delay = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.179548] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_limit.endpoint_id = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.179708] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_limit.endpoint_override = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.179870] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_limit.insecure = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.180041] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_limit.keyfile = None {{(pid=67015) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.180205] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_limit.max_version = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.180366] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_limit.min_version = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.180523] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_limit.region_name = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.180682] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_limit.retriable_status_codes = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.180845] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_limit.service_name = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.181038] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_limit.service_type = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.181216] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_limit.split_loggers = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.181381] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_limit.status_code_retries = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.181539] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_limit.status_code_retry_delay = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.181698] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_limit.timeout = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.181858] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_limit.valid_interfaces = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.182027] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_limit.version = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.182201] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_reports.file_event_handler = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.182369] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=67015) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.182529] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] oslo_reports.log_dir = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.182695] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.182859] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.183019] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.183187] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.183357] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.183518] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.183687] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.183847] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vif_plug_ovs_privileged.group = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.184017] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.184191] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.184355] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.184515] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] vif_plug_ovs_privileged.user = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.184685] env[67015]: DEBUG oslo_service.service 
[None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] os_vif_linux_bridge.flat_interface = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.184860] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.185065] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.185260] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.185433] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.185604] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.185773] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.185937] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.186129] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.186303] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] os_vif_ovs.isolate_vif = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.186477] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.186646] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.186819] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.186989] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] os_vif_ovs.ovsdb_interface = native {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
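The long run of log_opt_values records above and below is oslo.config's standard option dump at service startup. As a minimal, self-contained sketch (a toy option registration, not Nova's real startup code), the same "group.option = value" DEBUG records can be produced like this:

import logging

from oslo_config import cfg

CONF = cfg.CONF
# Illustrative only: register one of the options seen in the dump above.
CONF.register_opts([cfg.IntOpt('thread_pool_size', default=8)],
                   group='vif_plug_ovs_privileged')

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

CONF([], project='demo')
# Emits one DEBUG line per registered option, closed by the row of
# asterisks that ends the dump a few records further down in this log.
CONF.log_opt_values(LOG, logging.DEBUG)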
[ 521.187178] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] os_vif_ovs.per_port_bridge = False {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.187339] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] os_brick.lock_path = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.187505] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.187667] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.187836] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] privsep_osbrick.capabilities = [21] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.187996] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] privsep_osbrick.group = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.188170] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] privsep_osbrick.helper_command = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.188337] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.188501] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.188659] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] privsep_osbrick.user = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.188859] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.189045] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] nova_sys_admin.group = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.189215] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] nova_sys_admin.helper_command = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.189382] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
521.189546] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.189704] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] nova_sys_admin.user = None {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 521.189834] env[67015]: DEBUG oslo_service.service [None req-a6efe542-16f8-4b55-b886-16f49eea87d2 None None] ******************************************************************************** {{(pid=67015) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}} [ 521.190268] env[67015]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 521.200634] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] Getting list of instances from cluster (obj){ [ 521.200634] env[67015]: value = "domain-c8" [ 521.200634] env[67015]: _type = "ClusterComputeResource" [ 521.200634] env[67015]: } {{(pid=67015) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 521.201894] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a544bc43-4cfd-42d4-bffa-43b3f2acc129 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.211117] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] Got total of 0 instances {{(pid=67015) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 521.211651] env[67015]: WARNING nova.virt.vmwareapi.driver [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 521.212115] env[67015]: INFO nova.virt.node [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] Generated node identity 82311841-8ff3-4f49-9053-67c5a45ef771 [ 521.212341] env[67015]: INFO nova.virt.node [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] Wrote node identity 82311841-8ff3-4f49-9053-67c5a45ef771 to /opt/stack/data/n-cpu-1/compute_id [ 521.223995] env[67015]: WARNING nova.compute.manager [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] Compute nodes ['82311841-8ff3-4f49-9053-67c5a45ef771'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 521.258223] env[67015]: INFO nova.compute.manager [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 521.281582] env[67015]: WARNING nova.compute.manager [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
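The Acquiring/acquired/"released" lock triplets in the records that follow are emitted by oslo.concurrency's lockutils wrapper (the "inner ... lockutils.py" suffix on each record). A minimal sketch of the pattern, assuming DEBUG logging is enabled and with update_resources as a hypothetical stand-in for methods like ResourceTracker._update_available_resource:

from oslo_concurrency import lockutils

# Each call through the decorator logs 'Acquiring lock "compute_resources" by ...',
# then 'Lock "compute_resources" acquired by ... :: waited Ns', and on exit
# 'Lock "compute_resources" "released" by ... :: held Ns', matching the records below.
@lockutils.synchronized('compute_resources')
def update_resources():
    pass  # critical section guarded by the named in-process lock

update_resources()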
[ 521.282101] env[67015]: DEBUG oslo_concurrency.lockutils [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 521.282373] env[67015]: DEBUG oslo_concurrency.lockutils [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 521.282544] env[67015]: DEBUG oslo_concurrency.lockutils [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 521.282713] env[67015]: DEBUG nova.compute.resource_tracker [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 521.283864] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a8a9b3f-e668-4533-b9d7-22912e2bb303 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.292218] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc1eb1fd-4689-48cf-8a30-248b585cd288 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.306016] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e96bb6-e34c-4bc1-b1c9-524884c618d3 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.312497] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-897a0aaa-7183-4217-97e9-01f6287032de {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.342360] env[67015]: DEBUG nova.compute.resource_tracker [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181072MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 521.342482] env[67015]: DEBUG oslo_concurrency.lockutils [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 521.342907] env[67015]: DEBUG oslo_concurrency.lockutils [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 521.355028] env[67015]: WARNING 
nova.compute.resource_tracker [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] No compute node record for cpu-1:82311841-8ff3-4f49-9053-67c5a45ef771: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 82311841-8ff3-4f49-9053-67c5a45ef771 could not be found. [ 521.370374] env[67015]: INFO nova.compute.resource_tracker [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 82311841-8ff3-4f49-9053-67c5a45ef771 [ 521.423035] env[67015]: DEBUG nova.compute.resource_tracker [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 521.423211] env[67015]: DEBUG nova.compute.resource_tracker [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 521.519896] env[67015]: INFO nova.scheduler.client.report [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] [req-e43fdef5-3120-447a-b889-9534f5219d94] Created resource provider record via placement API for resource provider with UUID 82311841-8ff3-4f49-9053-67c5a45ef771 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 521.537471] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e81a760-cba2-4994-b95e-bf0681e04009 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.545232] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3316c8-f56d-48ef-b939-3a73b27fb541 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.575244] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-633854ff-dcb3-48ae-8900-6fcbba615010 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.582709] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e61b125-b9e3-4cd0-93ca-9192e4fce0e1 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.596506] env[67015]: DEBUG nova.compute.provider_tree [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] Updating inventory in ProviderTree for provider 82311841-8ff3-4f49-9053-67c5a45ef771 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 521.672851] env[67015]: DEBUG nova.scheduler.client.report [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] Updated inventory for provider 82311841-8ff3-4f49-9053-67c5a45ef771 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 521.672851] env[67015]: DEBUG nova.compute.provider_tree [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] Updating resource provider 82311841-8ff3-4f49-9053-67c5a45ef771 generation from 0 to 1 during operation: update_inventory {{(pid=67015) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 521.672851] env[67015]: DEBUG nova.compute.provider_tree [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] Updating inventory in ProviderTree for provider 82311841-8ff3-4f49-9053-67c5a45ef771 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 521.721965] env[67015]: DEBUG nova.compute.provider_tree [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] Updating resource provider 82311841-8ff3-4f49-9053-67c5a45ef771 generation from 1 to 2 during operation: update_traits {{(pid=67015) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 521.740777] env[67015]: DEBUG nova.compute.resource_tracker [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 521.741024] env[67015]: DEBUG oslo_concurrency.lockutils [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.398s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 521.741200] env[67015]: DEBUG nova.service [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] Creating RPC server for service compute {{(pid=67015) start /opt/stack/nova/nova/service.py:182}} [ 521.755432] env[67015]: DEBUG nova.service [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] Join ServiceGroup membership for this service compute {{(pid=67015) start /opt/stack/nova/nova/service.py:199}} [ 521.755620] env[67015]: DEBUG nova.servicegroup.drivers.db [None req-55cfd1a4-9103-4838-b8e5-c48b7ba4e48a None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=67015) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 531.023349] env[67015]: DEBUG dbcounter [-] [67015] Writing DB stats nova_cell0:SELECT=1 {{(pid=67015) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 531.024033] env[67015]: DEBUG dbcounter [-] [67015] Writing DB stats nova_cell1:SELECT=1 {{(pid=67015) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 561.644800] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0803302a-3100-439c-a555-934b7db8362c 
tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Acquiring lock "860124dd-da7e-4beb-832f-7a9ab9580aed" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.644800] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Lock "860124dd-da7e-4beb-832f-7a9ab9580aed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.668147] env[67015]: DEBUG nova.compute.manager [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 561.811622] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.815256] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.002s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.815440] env[67015]: INFO nova.compute.claims [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 561.967485] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041e1a60-c262-434e-a1fd-a4c578a91d0d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.980220] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fa84cb5-2b1c-490a-86a9-013972d68d6c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.025217] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f6b6208-8272-4d15-85d7-dfb964783a27 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.037659] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-416bd161-743a-4919-ad85-9595b8baabc7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.051870] env[67015]: DEBUG
nova.compute.provider_tree [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 562.062042] env[67015]: DEBUG nova.scheduler.client.report [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 562.087778] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.273s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.088026] env[67015]: DEBUG nova.compute.manager [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 562.132363] env[67015]: DEBUG nova.compute.utils [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 562.138113] env[67015]: DEBUG nova.compute.manager [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 562.138113] env[67015]: DEBUG nova.network.neutron [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 562.156033] env[67015]: DEBUG nova.compute.manager [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Start building block device mappings for instance. 
{{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 562.271253] env[67015]: DEBUG nova.compute.manager [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Start spawning the instance on the hypervisor. {{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 562.822911] env[67015]: DEBUG oslo_concurrency.lockutils [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Acquiring lock "96efb9a4-3c83-4dea-94f2-d93dead6a2ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.823495] env[67015]: DEBUG oslo_concurrency.lockutils [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Lock "96efb9a4-3c83-4dea-94f2-d93dead6a2ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.844121] env[67015]: DEBUG nova.compute.manager [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 562.911606] env[67015]: DEBUG oslo_concurrency.lockutils [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.912068] env[67015]: DEBUG oslo_concurrency.lockutils [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.914167] env[67015]: INFO nova.compute.claims [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 563.024561] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9e18d21-1ad4-4d45-ac91-bf76adde39e9 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.034789] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6110fab-2f9a-4d78-8383-2b8b4b9852dc {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.070114] env[67015]: DEBUG oslo_vmware.service [-]
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee1653c-7b5a-42db-a5cb-388348ac2058 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.077462] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc2e7d7-f638-4d33-9e3f-7a08baa8676d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.094863] env[67015]: DEBUG nova.compute.provider_tree [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 563.109613] env[67015]: DEBUG nova.scheduler.client.report [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 563.136623] env[67015]: DEBUG oslo_concurrency.lockutils [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.225s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 563.137892] env[67015]: DEBUG nova.compute.manager [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 563.189595] env[67015]: DEBUG nova.compute.utils [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 563.191009] env[67015]: DEBUG nova.compute.manager [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Not allocating networking since 'none' was specified. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 563.206599] env[67015]: DEBUG nova.compute.manager [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Start building block device mappings for instance. 
{{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 563.243547] env[67015]: DEBUG nova.virt.hardware [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 563.244209] env[67015]: DEBUG nova.virt.hardware [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 563.244209] env[67015]: DEBUG nova.virt.hardware [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 563.244392] env[67015]: DEBUG nova.virt.hardware [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 563.244587] env[67015]: DEBUG nova.virt.hardware [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 563.244774] env[67015]: DEBUG nova.virt.hardware [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 563.245057] env[67015]: DEBUG nova.virt.hardware [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 563.245268] env[67015]: DEBUG nova.virt.hardware [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 563.245710] env[67015]: DEBUG nova.virt.hardware [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 563.245913] env[67015]: DEBUG nova.virt.hardware [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 563.246179] env[67015]: DEBUG nova.virt.hardware [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 563.247414] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd02d475-7729-4c88-9cd1-598805eb5683 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.255987] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac7c7fdb-cd05-41dc-9551-32541ee3211c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.287422] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a849b309-e965-415a-8850-a00fd81896d8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.317734] env[67015]: DEBUG nova.compute.manager [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Start spawning the instance on the hypervisor. 
{{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 563.345901] env[67015]: DEBUG nova.virt.hardware [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 563.346182] env[67015]: DEBUG nova.virt.hardware [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 563.346375] env[67015]: DEBUG nova.virt.hardware [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 563.346569] env[67015]: DEBUG nova.virt.hardware [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 563.346792] env[67015]: DEBUG nova.virt.hardware [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 563.346877] env[67015]: DEBUG nova.virt.hardware [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 563.347122] env[67015]: DEBUG nova.virt.hardware [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 563.347346] env[67015]: DEBUG nova.virt.hardware [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 563.347495] env[67015]: DEBUG 
nova.virt.hardware [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 563.347681] env[67015]: DEBUG nova.virt.hardware [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 563.347847] env[67015]: DEBUG nova.virt.hardware [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 563.348799] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a8d7ae-92be-4cbf-8ba6-5f0a34e5285d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.359489] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b3d1609-4cd6-4106-a418-938dcbc40fb0 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.377167] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Instance VIF info [] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 563.388312] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 563.388969] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a947271c-10f0-4ec8-aa66-c7fae389ad47 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.402048] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Created folder: OpenStack in parent group-v4. [ 563.403032] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Creating folder: Project (4873879423ec403da15d79bdf02fa29f). Parent ref: group-v623108. 
{{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 563.403032] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-04597275-8d32-4c70-b804-86c354cacc4c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.414155] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Created folder: Project (4873879423ec403da15d79bdf02fa29f) in parent group-v623108. [ 563.414236] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Creating folder: Instances. Parent ref: group-v623109. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 563.425055] env[67015]: DEBUG nova.policy [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dba2a2da1a7e46e48c61956d96cbaf3c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18efbd420f2b481697fb2fdc419a4666', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 563.426409] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e11a5074-ef67-4e0e-85de-51bdf8addcda {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.436550] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Created folder: Instances in parent group-v623109. [ 563.436811] env[67015]: DEBUG oslo.service.loopingcall [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 563.438265] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 563.438541] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cc01341c-8d4c-4a9b-b3ab-6e5e63340a26 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.463015] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 563.463015] env[67015]: value = "task-3114357" [ 563.463015] env[67015]: _type = "Task" [ 563.463015] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.474730] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114357, 'name': CreateVM_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.977218] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114357, 'name': CreateVM_Task} progress is 99%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.985024] env[67015]: DEBUG oslo_concurrency.lockutils [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Acquiring lock "bf738778-771f-4a1a-b83e-d786c67dafc0" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.986425] env[67015]: DEBUG oslo_concurrency.lockutils [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Lock "bf738778-771f-4a1a-b83e-d786c67dafc0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.005653] env[67015]: DEBUG nova.compute.manager [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 564.074319] env[67015]: DEBUG oslo_concurrency.lockutils [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.074695] env[67015]: DEBUG oslo_concurrency.lockutils [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.077539] env[67015]: INFO nova.compute.claims [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 564.196041] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a498c8d-81da-4841-8d4a-af072f45d2ad {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.205624] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c2532c5-a801-495e-a4e5-a91287116efb {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.252871] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e743b6-f495-4d95-8f3b-b1b8c80d679e {{(pid=67015) request_handler
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.258679] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c59aee8f-4702-4c16-a3f7-953ff54dfab7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.273073] env[67015]: DEBUG nova.compute.provider_tree [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 564.285684] env[67015]: DEBUG nova.scheduler.client.report [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 564.303564] env[67015]: DEBUG oslo_concurrency.lockutils [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.228s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 564.303713] env[67015]: DEBUG nova.compute.manager [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 564.348569] env[67015]: DEBUG nova.compute.utils [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 564.350083] env[67015]: DEBUG nova.compute.manager [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Allocating IP information in the background. 
{{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 564.350328] env[67015]: DEBUG nova.network.neutron [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 564.371237] env[67015]: DEBUG nova.compute.manager [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 564.423104] env[67015]: DEBUG nova.network.neutron [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Successfully created port: ad607619-3a16-4045-956a-b5a81b98e301 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 564.467837] env[67015]: DEBUG nova.compute.manager [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Start spawning the instance on the hypervisor. {{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 564.480520] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114357, 'name': CreateVM_Task} progress is 99%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.502473] env[67015]: DEBUG nova.virt.hardware [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 564.502762] env[67015]: DEBUG nova.virt.hardware [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 564.502906] env[67015]: DEBUG nova.virt.hardware [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 564.503115] env[67015]: DEBUG nova.virt.hardware [None 
req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 564.503259] env[67015]: DEBUG nova.virt.hardware [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 564.503409] env[67015]: DEBUG nova.virt.hardware [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 564.503621] env[67015]: DEBUG nova.virt.hardware [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 564.503923] env[67015]: DEBUG nova.virt.hardware [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 564.503989] env[67015]: DEBUG nova.virt.hardware [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 564.504123] env[67015]: DEBUG nova.virt.hardware [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 564.504303] env[67015]: DEBUG nova.virt.hardware [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 564.505178] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f78eba-ac60-49f7-82dc-455a42897d64 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.515081] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f0168b-08d8-4ff2-9a63-f89bdaff0d58 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.590404] env[67015]: DEBUG nova.policy [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cfcc005c725e4028b4588f424c6d598f', 'user_domain_id': 'default', 
'system_scope': None, 'domain_id': None, 'project_id': 'c134cc4c3b7041e5aad7cd35c439b6bd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 564.698104] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Acquiring lock "3eb80b23-a4a3-43e6-9620-86bf1eb344f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.698304] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Lock "3eb80b23-a4a3-43e6-9620-86bf1eb344f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.709042] env[67015]: DEBUG nova.compute.manager [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 564.784876] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.785186] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.786605] env[67015]: INFO nova.compute.claims [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 564.931697] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0266f284-02a8-4a38-b044-b75109344bec {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.939775] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92f4fe4-f35f-48a4-ae54-4e84e6d3bbbe {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.977342] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae3d9e9-88c7-41cb-9225-6632e5324be1 {{(pid=67015) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.985806] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114357, 'name': CreateVM_Task, 'duration_secs': 1.305343} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 564.987717] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 564.989353] env[67015]: DEBUG oslo_vmware.service [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d4a947-26a9-489d-9520-3e632887aee7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.996336] env[67015]: DEBUG oslo_concurrency.lockutils [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.996426] env[67015]: DEBUG oslo_concurrency.lockutils [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.997146] env[67015]: DEBUG oslo_concurrency.lockutils [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 564.998314] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa4218e-ae88-449f-9173-3bbfccd494ee {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.007669] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1b55d69-4622-4129-8baf-6d3ff13e8dea {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.014029] env[67015]: DEBUG oslo_vmware.api [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Waiting for the task: (returnval){ [ 565.014029] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]5221b647-a162-3f68-b6d5-773100ad92a0" [ 565.014029] env[67015]: _type = "Task" [ 565.014029] env[67015]: } to complete. 
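[editor's note] The "Waiting for the task ... to complete" block above is oslo.vmware polling a vCenter task object until it reaches a terminal state; the CreateVM_Task lines show the same loop reporting 99% and then "completed successfully" with a duration_secs. A minimal illustrative poll loop, assuming a get_state() callable and omitting oslo.vmware's real backoff and fault translation (see oslo_vmware/api.py):

    import time

    def wait_for_task(get_state, interval=0.5, timeout=60):
        """Poll get_state() -> (state, progress) until success or error."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress = get_state()      # e.g. ('running', 99)
            if state == 'success':
                return
            if state == 'error':
                raise RuntimeError('vCenter task failed')
            time.sleep(interval)               # real code logs progress each pass
        raise TimeoutError('task did not complete in time')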
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 565.023447] env[67015]: DEBUG nova.compute.provider_tree [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 565.035672] env[67015]: DEBUG oslo_concurrency.lockutils [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 565.035909] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 565.036405] env[67015]: DEBUG oslo_concurrency.lockutils [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 565.036405] env[67015]: DEBUG oslo_concurrency.lockutils [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 565.036693] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 565.037790] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4b2ee277-ee48-49cd-b84b-77be4ef56035 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.040475] env[67015]: DEBUG nova.scheduler.client.report [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 565.060882] 
env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 565.060998] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 565.061952] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7447ee03-408a-4982-aabd-1ee2edcddcc5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.066208] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.281s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 565.066660] env[67015]: DEBUG nova.compute.manager [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 565.073042] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc92db02-78de-40cf-b457-38dec7efd2d4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.078279] env[67015]: DEBUG oslo_vmware.api [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Waiting for the task: (returnval){ [ 565.078279] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52a883ee-5c3b-3c0e-c7b0-50c84419c664" [ 565.078279] env[67015]: _type = "Task" [ 565.078279] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 565.086360] env[67015]: DEBUG oslo_vmware.api [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52a883ee-5c3b-3c0e-c7b0-50c84419c664, 'name': SearchDatastore_Task} progress is 0%. 
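[editor's note] The Acquiring/acquired/released "compute_resources" lines, with their waited/held timings, are emitted by oslo.concurrency's lockutils wrapper. Minimal usage sketch with the real lockutils API; the lock name is taken from the log:

    from oslo_concurrency import lockutils

    # Context-manager form: the 'acquired ... :: waited' and 'released ... :: held'
    # debug lines above bracket exactly this kind of critical section.
    with lockutils.lock('compute_resources'):
        pass  # e.g. ResourceTracker.instance_claim runs here

    # Decorator form -- equivalent per-lock-name serialization:
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        pass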
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.118151] env[67015]: DEBUG nova.compute.utils [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 565.120571] env[67015]: DEBUG nova.compute.manager [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 565.120800] env[67015]: DEBUG nova.network.neutron [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 565.134273] env[67015]: DEBUG nova.compute.manager [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 565.209178] env[67015]: DEBUG nova.compute.manager [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Start spawning the instance on the hypervisor. 
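[editor's note] The nova.virt.hardware trace above (and repeated below for each instance) walks one algorithm: with no flavor or image constraints, the preferred topology is (0,0,0), the maximum defaults to 65536 per dimension, and the driver enumerates every (sockets, cores, threads) triple whose product equals the vCPU count; for 1 vCPU that is only (1,1,1). A hedged re-sketch of that enumeration, not the nova/virt/hardware.py code itself:

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product equals vcpus."""
        for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                               range(1, min(vcpus, max_cores) + 1),
                               range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                yield (s, c, t)

    print(list(possible_topologies(1)))   # [(1, 1, 1)] -- matches the log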
{{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 565.243156] env[67015]: DEBUG nova.virt.hardware [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 565.243449] env[67015]: DEBUG nova.virt.hardware [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 565.243579] env[67015]: DEBUG nova.virt.hardware [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 565.244295] env[67015]: DEBUG nova.virt.hardware [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 565.244295] env[67015]: DEBUG nova.virt.hardware [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 565.244295] env[67015]: DEBUG nova.virt.hardware [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 565.244295] env[67015]: DEBUG nova.virt.hardware [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 565.244448] env[67015]: DEBUG nova.virt.hardware [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 565.244563] env[67015]: DEBUG nova.virt.hardware [None 
req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 565.244729] env[67015]: DEBUG nova.virt.hardware [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 565.244902] env[67015]: DEBUG nova.virt.hardware [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 565.246123] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fac537e4-1869-4037-a4e5-99f36f8c0438 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.259036] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2868688-39f4-40a5-b09b-8f944992331a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.275614] env[67015]: DEBUG nova.network.neutron [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Successfully created port: df79b2fb-eefd-41c6-81b0-1f46ebe4dcf2 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 565.300649] env[67015]: DEBUG nova.policy [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd8e8498469664a1aa526beedf6cf5e1f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63c0561098d0449faf0c374dec938510', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 565.589327] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 565.589658] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Creating directory with path [datastore2] vmware_temp/5f6176e2-49ce-45fe-af59-a7b3e24d4275/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 565.590175] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-7642845d-3054-49cd-b3d0-94c102e2b161 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.612397] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Created directory with path [datastore2] vmware_temp/5f6176e2-49ce-45fe-af59-a7b3e24d4275/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 565.614239] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Fetch image to [datastore2] vmware_temp/5f6176e2-49ce-45fe-af59-a7b3e24d4275/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 565.614239] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/5f6176e2-49ce-45fe-af59-a7b3e24d4275/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 565.614239] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5a9fdc1-a950-437e-b92e-974a0824a412 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.624673] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea430df-e1e9-4371-8ca7-546085693a84 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.636666] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f30a7ebf-ac8e-48fc-a72b-d90890fd63ed {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.676886] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c31f2c-86ae-4c44-85c7-4b5e22172ce9 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.688039] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7bb35dd3-7d72-4cdf-aea8-9cb33e7b5ed9 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.708372] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 565.792494] env[67015]: DEBUG oslo_vmware.rw_handles [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 
tempest-ServerDiagnosticsV248Test-68294157-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5f6176e2-49ce-45fe-af59-a7b3e24d4275/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 565.869187] env[67015]: DEBUG oslo_concurrency.lockutils [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Acquiring lock "1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.869600] env[67015]: DEBUG oslo_concurrency.lockutils [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Lock "1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.871326] env[67015]: DEBUG oslo_vmware.rw_handles [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 565.871515] env[67015]: DEBUG oslo_vmware.rw_handles [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5f6176e2-49ce-45fe-af59-a7b3e24d4275/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 565.892021] env[67015]: DEBUG nova.compute.manager [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Starting instance... 
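[editor's note] The oslo_vmware.rw_handles entries above stream the 21318656-byte image file to the datastore over an HTTP write connection to the "/folder/..." URL, then close the handle once the image iterator is exhausted. A simplified, illustrative upload using requests; the real rw_handles code manages service tickets, TLS and chunking, so the cookies/verify arguments here are placeholders, not oslo.vmware's API:

    import requests

    def upload_vmdk(url, image_iter, size, cookies=None):
        """Stream image bytes to a vSphere datastore 'folder' URL via HTTP PUT."""
        headers = {'Content-Length': str(size),
                   'Content-Type': 'application/octet-stream'}
        resp = requests.put(url, data=image_iter, headers=headers,
                            cookies=cookies, verify=False)  # verify per deployment
        resp.raise_for_status()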
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 565.966533] env[67015]: DEBUG oslo_concurrency.lockutils [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.966782] env[67015]: DEBUG oslo_concurrency.lockutils [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.968303] env[67015]: INFO nova.compute.claims [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 566.029207] env[67015]: DEBUG nova.network.neutron [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Successfully created port: bddcd632-a286-4f4c-a565-aebaaa7dbb59 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 566.125652] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Acquiring lock "994ad7c4-4a41-49b1-98b8-efd2f2b134e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.125652] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Lock "994ad7c4-4a41-49b1-98b8-efd2f2b134e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.144289] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e67d6b9e-bab5-4baf-aa45-dfa7a5352fc1 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.150709] env[67015]: DEBUG nova.compute.manager [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Starting instance... 
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 566.158201] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5d9fc56-053e-4d93-b010-f5092a629378 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.191574] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c453b0-81e4-4f8a-b8ed-917359bf0ebd {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.200244] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f21252-9cc7-4f59-bbd1-33f56dc151f8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.210234] env[67015]: DEBUG nova.network.neutron [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Successfully updated port: ad607619-3a16-4045-956a-b5a81b98e301 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 566.222319] env[67015]: DEBUG nova.compute.provider_tree [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 566.233390] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Acquiring lock "refresh_cache-860124dd-da7e-4beb-832f-7a9ab9580aed" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.234031] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Acquired lock "refresh_cache-860124dd-da7e-4beb-832f-7a9ab9580aed" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.234031] env[67015]: DEBUG nova.network.neutron [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 566.235817] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.243367] env[67015]: DEBUG nova.scheduler.client.report [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Inventory has not changed 
for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 566.261853] env[67015]: DEBUG oslo_concurrency.lockutils [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.295s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.262306] env[67015]: DEBUG nova.compute.manager [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 566.267271] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.030s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.267704] env[67015]: INFO nova.compute.claims [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 566.301817] env[67015]: DEBUG nova.compute.utils [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 566.303549] env[67015]: DEBUG nova.compute.manager [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Allocating IP information in the background. 
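[editor's note] "Allocating IP information in the background" means Neutron port allocation is kicked off concurrently while the manager continues with block device mappings, joining on the result before spawn. Nova does this on eventlet greenthreads; a generic thread-pool sketch of the same pattern, with allocate_network and build_block_devices as stand-in callables:

    from concurrent.futures import ThreadPoolExecutor

    def build_instance(allocate_network, build_block_devices):
        with ThreadPoolExecutor(max_workers=1) as pool:
            nw_future = pool.submit(allocate_network)  # runs in the background
            build_block_devices()                      # proceeds immediately
            return nw_future.result()                  # join before spawning the VM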
{{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 566.303841] env[67015]: DEBUG nova.network.neutron [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 566.311962] env[67015]: DEBUG nova.compute.manager [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 566.396761] env[67015]: DEBUG nova.compute.manager [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Start spawning the instance on the hypervisor. {{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 566.441256] env[67015]: DEBUG nova.virt.hardware [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 566.441498] env[67015]: DEBUG nova.virt.hardware [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 566.441684] env[67015]: DEBUG nova.virt.hardware [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 566.441910] env[67015]: DEBUG nova.virt.hardware [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 566.442081] env[67015]: DEBUG nova.virt.hardware [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 
tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 566.442218] env[67015]: DEBUG nova.virt.hardware [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 566.442432] env[67015]: DEBUG nova.virt.hardware [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 566.442593] env[67015]: DEBUG nova.virt.hardware [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 566.442872] env[67015]: DEBUG nova.virt.hardware [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 566.442950] env[67015]: DEBUG nova.virt.hardware [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 566.443137] env[67015]: DEBUG nova.virt.hardware [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 566.444015] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-425f6e02-d6eb-4c4e-a584-72dfc5886de4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.447865] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-823cb097-a88f-4299-890a-f9ce206ac76b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.457351] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9cac533-489a-499b-999f-0f8f22c0fb4c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.461439] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-651b45e7-e5ce-4250-8075-066545a72a6f 
{{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.503125] env[67015]: DEBUG nova.policy [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '288a0e8be153403fa8dcb0175c5a0643', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba7a189cc3084e6db0d0a432140bf819', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 566.505322] env[67015]: DEBUG nova.network.neutron [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 566.507911] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e39dbf-f265-4a45-af6c-9315f0685c01 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.517510] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-507400f6-88d2-4eac-9555-58e8d789045c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.533037] env[67015]: DEBUG nova.compute.provider_tree [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 566.552684] env[67015]: DEBUG nova.scheduler.client.report [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 566.578500] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.312s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.578697] env[67015]: DEBUG nova.compute.manager [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] 
[instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 566.639906] env[67015]: DEBUG nova.compute.utils [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 566.641084] env[67015]: DEBUG nova.compute.manager [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Not allocating networking since 'none' was specified. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 566.653977] env[67015]: DEBUG nova.compute.manager [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 566.765744] env[67015]: DEBUG nova.compute.manager [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Start spawning the instance on the hypervisor. {{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 566.810496] env[67015]: DEBUG nova.virt.hardware [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 566.810743] env[67015]: DEBUG nova.virt.hardware [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 566.810896] env[67015]: DEBUG nova.virt.hardware [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 566.811093] env[67015]: DEBUG nova.virt.hardware [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 566.811436] env[67015]: DEBUG nova.virt.hardware [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 566.811666] env[67015]: DEBUG nova.virt.hardware [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 566.811918] env[67015]: DEBUG nova.virt.hardware [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 566.812090] env[67015]: DEBUG nova.virt.hardware [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 566.812435] env[67015]: DEBUG nova.virt.hardware [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 566.812641] env[67015]: DEBUG nova.virt.hardware [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 566.812841] env[67015]: DEBUG nova.virt.hardware [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 566.815592] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73f9b817-a9d4-4c5a-9aab-c46a53e86015 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.830738] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-050d7463-7f79-4986-9326-ec09a8ee8829 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.838656] env[67015]: DEBUG nova.network.neutron [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Successfully updated port: df79b2fb-eefd-41c6-81b0-1f46ebe4dcf2 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 566.855451] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] 
[instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Instance VIF info [] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 566.862910] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Creating folder: Project (b3b500e012654030ace3435803befeb3). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 566.863190] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b7506098-da8e-4236-a881-d14939274d3c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.865255] env[67015]: DEBUG oslo_concurrency.lockutils [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Acquiring lock "refresh_cache-bf738778-771f-4a1a-b83e-d786c67dafc0" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.865493] env[67015]: DEBUG oslo_concurrency.lockutils [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Acquired lock "refresh_cache-bf738778-771f-4a1a-b83e-d786c67dafc0" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.865566] env[67015]: DEBUG nova.network.neutron [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 566.870988] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquiring lock "e0ac77e1-fb77-4b97-bacc-838cc3e16bbc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.871227] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Lock "e0ac77e1-fb77-4b97-bacc-838cc3e16bbc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.879327] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Acquiring lock "13e28171-8074-4660-91cf-f6d569414fc6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.879770] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Lock 
"13e28171-8074-4660-91cf-f6d569414fc6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.881319] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Created folder: Project (b3b500e012654030ace3435803befeb3) in parent group-v623108. [ 566.881319] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Creating folder: Instances. Parent ref: group-v623112. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 566.881678] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-25b89775-f7f1-45c7-94f7-7cefecd47871 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.892428] env[67015]: DEBUG nova.compute.manager [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 566.895990] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Created folder: Instances in parent group-v623112. [ 566.896105] env[67015]: DEBUG oslo.service.loopingcall [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 566.902369] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 566.902369] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47f9ee7d-05d2-461e-ad4f-411e10740cd7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.918365] env[67015]: DEBUG nova.compute.manager [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 566.925652] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 566.925652] env[67015]: value = "task-3114360" [ 566.925652] env[67015]: _type = "Task" [ 566.925652] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 566.939731] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114360, 'name': CreateVM_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.957826] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.958211] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.003s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.960115] env[67015]: INFO nova.compute.claims [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 566.980109] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.003964] env[67015]: DEBUG nova.network.neutron [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Instance cache missing network info. 
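The `Acquiring lock "compute_resources" ... acquired ... waited 0.003s` / `"released" ... held 0.342s` triples in the entries above come from oslo.concurrency's named-lock wrapper, which times both the wait for the lock and the time it was held. A rough stand-in built on a plain `threading.Lock`, only to show where the two durations are measured (the helper name is illustrative, not oslo's internals):

```python
import contextlib
import threading
import time

_locks = {}

@contextlib.contextmanager
def timed_lock(name):
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    print('Acquiring lock "%s"' % name)
    with lock:
        waited = time.monotonic() - t0
        print('Lock "%s" acquired :: waited %.3fs' % (name, waited))
        t1 = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - t1
            print('Lock "%s" "released" :: held %.3fs' % (name, held))

# e.g. the resource tracker serializes claims on one compute host:
#   with timed_lock("compute_resources"): ...
```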
{{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 567.053409] env[67015]: DEBUG nova.network.neutron [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Updating instance_info_cache with network_info: [{"id": "ad607619-3a16-4045-956a-b5a81b98e301", "address": "fa:16:3e:94:3a:d9", "network": {"id": "7451a549-058d-44bf-acca-7bd945ac5def", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.28", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "482f98aba88c4103b6a8d7c7ab5d030d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad607619-3a", "ovs_interfaceid": "ad607619-3a16-4045-956a-b5a81b98e301", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.071786] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Releasing lock "refresh_cache-860124dd-da7e-4beb-832f-7a9ab9580aed" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 567.072181] env[67015]: DEBUG nova.compute.manager [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Instance network_info: |[{"id": "ad607619-3a16-4045-956a-b5a81b98e301", "address": "fa:16:3e:94:3a:d9", "network": {"id": "7451a549-058d-44bf-acca-7bd945ac5def", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.28", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "482f98aba88c4103b6a8d7c7ab5d030d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad607619-3a", "ovs_interfaceid": "ad607619-3a16-4045-956a-b5a81b98e301", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 567.073007] 
env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:3a:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f0ef5aba-bd9a-42ff-a1a0-5e763986d70a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ad607619-3a16-4045-956a-b5a81b98e301', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 567.082836] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Creating folder: Project (18efbd420f2b481697fb2fdc419a4666). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 567.086154] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5543e302-442c-49e9-869d-6b5da92ab8e3 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.104446] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Created folder: Project (18efbd420f2b481697fb2fdc419a4666) in parent group-v623108. [ 567.104446] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Creating folder: Instances. Parent ref: group-v623115. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 567.104581] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4208ca49-1554-4a61-ac7a-ce4aa8e016b6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.115150] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Created folder: Instances in parent group-v623115. [ 567.115358] env[67015]: DEBUG oslo.service.loopingcall [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
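The pair of entries above shows the translation the vmwareapi driver performs: a Neutron port from `network_info` (port id, MAC, NSX logical-switch id) becomes the flat `Instance VIF info` dict that `build_virtual_machine` consumes. A simplified sketch of that mapping, covering only the OpaqueNetwork/nsx.LogicalSwitch case seen in this trace:

```python
def vif_info_from_network_info(vif):
    """Reduce one network_info entry to the dict logged as 'Instance VIF info'.

    Sketch only: the real nova.virt.vmwareapi code branches on VIF type;
    this handles just the nsxv3-bound OVS ports appearing in this log.
    """
    details = vif["details"]
    return {
        "network_name": vif["network"]["bridge"],   # 'br-int'
        "mac_address": vif["address"],              # 'fa:16:3e:...'
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],                      # Neutron port UUID
        "vif_model": "vmxnet3",                     # per the image name
    }
```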
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 567.116536] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 567.119425] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-001a0985-225c-42b0-bac8-94d75adc5922 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.139201] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 567.139201] env[67015]: value = "task-3114363" [ 567.139201] env[67015]: _type = "Task" [ 567.139201] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 567.147697] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114363, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.198576] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47e975e9-083f-4e7a-a0c4-1a7a380f1839 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.206016] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2098f14c-b89f-4e22-8345-74f9a6f109df {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.241035] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63abe823-f6aa-4d56-b88b-3a5df9b94e48 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.249665] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e4bbf0-221c-4f54-8b1e-6c77b58b3b30 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.266136] env[67015]: DEBUG nova.compute.provider_tree [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 567.280733] env[67015]: DEBUG nova.scheduler.client.report [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 567.299982] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 
tempest-DeleteServersAdminTestJSON-229028277-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.342s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.300528] env[67015]: DEBUG nova.compute.manager [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 567.305366] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.325s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.309071] env[67015]: INFO nova.compute.claims [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 567.342899] env[67015]: DEBUG nova.compute.utils [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 567.343786] env[67015]: DEBUG nova.compute.manager [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 567.344167] env[67015]: DEBUG nova.network.neutron [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 567.356056] env[67015]: DEBUG nova.compute.manager [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 567.439170] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114360, 'name': CreateVM_Task} progress is 99%. 
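The inventory dict repeated in these scheduler reports is Placement's standard resource-provider inventory. Effective schedulable capacity per resource class is `(total - reserved) * allocation_ratio`, while `max_unit` caps what a single allocation may claim. Worked out for the numbers above:

```python
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "max_unit": 16,    "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "max_unit": 65530, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "max_unit": 135,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: capacity={capacity:g}, per-allocation cap={inv['max_unit']}")
# VCPU: capacity=192, per-allocation cap=16
# MEMORY_MB: capacity=196078, per-allocation cap=65530
# DISK_GB: capacity=400, per-allocation cap=135
```

So the m1.nano claims (1 VCPU, 128 MB, 1 GB) succeed trivially against this provider, which is why every `instance_claim` above logs "Claim successful".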
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.469865] env[67015]: DEBUG nova.network.neutron [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Successfully created port: 7e886dc2-6df7-4d0e-957c-f5e018a4c239 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 567.476453] env[67015]: DEBUG nova.compute.manager [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Start spawning the instance on the hypervisor. {{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 567.486842] env[67015]: DEBUG nova.network.neutron [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Updating instance_info_cache with network_info: [{"id": "df79b2fb-eefd-41c6-81b0-1f46ebe4dcf2", "address": "fa:16:3e:67:8b:5a", "network": {"id": "7451a549-058d-44bf-acca-7bd945ac5def", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.129", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "482f98aba88c4103b6a8d7c7ab5d030d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf79b2fb-ee", "ovs_interfaceid": "df79b2fb-eefd-41c6-81b0-1f46ebe4dcf2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.506182] env[67015]: DEBUG oslo_concurrency.lockutils [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Releasing lock "refresh_cache-bf738778-771f-4a1a-b83e-d786c67dafc0" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 567.506314] env[67015]: DEBUG nova.compute.manager [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Instance network_info: |[{"id": "df79b2fb-eefd-41c6-81b0-1f46ebe4dcf2", "address": "fa:16:3e:67:8b:5a", "network": {"id": "7451a549-058d-44bf-acca-7bd945ac5def", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.129", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "482f98aba88c4103b6a8d7c7ab5d030d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf79b2fb-ee", "ovs_interfaceid": "df79b2fb-eefd-41c6-81b0-1f46ebe4dcf2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 567.508859] env[67015]: DEBUG nova.virt.hardware [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 567.509102] env[67015]: DEBUG nova.virt.hardware [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 567.509264] env[67015]: DEBUG nova.virt.hardware [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 567.509446] env[67015]: DEBUG nova.virt.hardware [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 567.509592] env[67015]: DEBUG nova.virt.hardware [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 567.509742] env[67015]: DEBUG nova.virt.hardware [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 567.509954] env[67015]: DEBUG nova.virt.hardware [None 
req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 567.510125] env[67015]: DEBUG nova.virt.hardware [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 567.510287] env[67015]: DEBUG nova.virt.hardware [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 567.510492] env[67015]: DEBUG nova.virt.hardware [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 567.510620] env[67015]: DEBUG nova.virt.hardware [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 567.511261] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:8b:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f0ef5aba-bd9a-42ff-a1a0-5e763986d70a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'df79b2fb-eefd-41c6-81b0-1f46ebe4dcf2', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 567.520386] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Creating folder: Project (c134cc4c3b7041e5aad7cd35c439b6bd). Parent ref: group-v623108. 
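Each `0:0:0` triple in the hardware entries above is `sockets:cores:threads`, with 0 meaning "unset"; absent any flavor or image preference the limits default to 65536 each, and for a 1-vCPU m1.nano the only factorization is 1:1:1. A toy version of the enumeration `_get_possible_cpu_topologies` performs (simplified from nova/virt/hardware.py, which additionally honours preferred topologies and sorts candidates):

```python
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
    """Enumerate sockets*cores*threads factorizations of vcpus within limits."""
    found = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    found.append(VirtCPUTopology(s, c, t))
    return found

print(possible_cpu_topologies(1))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]  -> 'Got 1 possible topologies'
```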
{{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 567.521638] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61da8543-2433-4d69-926d-7350a986d53d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.528418] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c9806077-0df8-40a9-a6a3-51862109a52e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.536688] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a1e366-2679-45e8-83ee-6ea2e2efe569 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.542727] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Created folder: Project (c134cc4c3b7041e5aad7cd35c439b6bd) in parent group-v623108. [ 567.542935] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Creating folder: Instances. Parent ref: group-v623118. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 567.545740] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3f6e55b1-38a0-45af-9c58-43cdee251e8e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.566585] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Created folder: Instances in parent group-v623118. [ 567.566844] env[67015]: DEBUG oslo.service.loopingcall [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 567.567198] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 567.567432] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c13fd37a-eb6d-4b29-900d-d4eb6aada086 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.590982] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 567.590982] env[67015]: value = "task-3114366" [ 567.590982] env[67015]: _type = "Task" [ 567.590982] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 567.597893] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114366, 'name': CreateVM_Task} progress is 0%. 
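The folder dance above repeats per tenant: a `Project (<project_id>)` folder is created under the compute root (group-v623108 here), then an `Instances` folder under the new group, and the VM is created inside it. A sketch of that layout, with a hypothetical `create_folder(parent_ref, name)` standing in for the Folder.CreateFolder SOAP call:

```python
def ensure_instance_folder(create_folder, root_ref, project_id):
    """Build the '<root>/Project (<id>)/Instances' hierarchy.

    create_folder(parent_ref, name) -> folder_ref is a hypothetical wrapper
    around vSphere's Folder.CreateFolder; the real vm_util also tolerates
    DuplicateName when the folder already exists.
    """
    project_ref = create_folder(root_ref, "Project (%s)" % project_id)
    return create_folder(project_ref, "Instances")
```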
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.600792] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06e5142-6b0b-4d96-9144-96a32b158d65 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.605738] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86bc102f-7d6d-4d65-b8a5-17accaf69fe5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.635323] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05feda8a-64de-406f-9cc8-4aa94b242e3c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.647008] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-587c9d16-9078-4e19-bbd5-962806826942 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.655487] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114363, 'name': CreateVM_Task} progress is 25%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.666137] env[67015]: DEBUG nova.compute.provider_tree [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 567.677458] env[67015]: DEBUG nova.scheduler.client.report [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 567.685169] env[67015]: DEBUG nova.policy [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '48441807d0a54004a9ad41afca6ef53a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '58cb53c4e60c4084a1211154a2c2b12a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 567.694067] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.389s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.694634] env[67015]: DEBUG nova.compute.manager [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 567.739301] env[67015]: DEBUG nova.compute.utils [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 567.739610] env[67015]: DEBUG nova.compute.manager [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 567.739779] env[67015]: DEBUG nova.network.neutron [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 567.744577] env[67015]: DEBUG nova.network.neutron [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Successfully updated port: bddcd632-a286-4f4c-a565-aebaaa7dbb59 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 567.756808] env[67015]: DEBUG nova.compute.manager [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Start building block device mappings for instance. 
{{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 567.759856] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Acquiring lock "refresh_cache-3eb80b23-a4a3-43e6-9620-86bf1eb344f2" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.759925] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Acquired lock "refresh_cache-3eb80b23-a4a3-43e6-9620-86bf1eb344f2" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.760095] env[67015]: DEBUG nova.network.neutron [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 567.861019] env[67015]: DEBUG nova.compute.manager [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Start spawning the instance on the hypervisor. {{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 567.900842] env[67015]: DEBUG nova.virt.hardware [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 567.900938] env[67015]: DEBUG nova.virt.hardware [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 567.901844] env[67015]: DEBUG nova.virt.hardware [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 567.901844] env[67015]: DEBUG nova.virt.hardware [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Flavor pref 0:0:0 
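The `Acquiring lock "refresh_cache-<uuid>" ... Acquired ... Building network info cache` sequence above is the standard reaction to a port-update event: serialize per instance on a named lock, rebuild `network_info` from Neutron, persist it to `instance_info_cache`, then release. A compressed sketch of that flow; the callables beyond the names in the log are hypothetical stand-ins:

```python
def refresh_instance_cache(lock, neutron, instance):
    """Rebuild an instance's cached network_info after a port update.

    `lock(name)` is a named-lock context manager (lockutils.lock in nova);
    `neutron.list_ports` and `instance.save_network_info` stand in for the
    real client and cache-persistence calls.
    """
    with lock("refresh_cache-%s" % instance.uuid):
        # 'Building network info cache for instance'
        ports = neutron.list_ports(device_id=instance.uuid)
        # 'Updating instance_info_cache with network_info: [...]'
        instance.save_network_info(ports)
    # on exit: 'Releasing lock "refresh_cache-<uuid>"'
```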
{{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 567.901844] env[67015]: DEBUG nova.virt.hardware [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 567.901844] env[67015]: DEBUG nova.virt.hardware [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 567.902046] env[67015]: DEBUG nova.virt.hardware [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 567.902046] env[67015]: DEBUG nova.virt.hardware [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 567.903392] env[67015]: DEBUG nova.virt.hardware [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 567.903392] env[67015]: DEBUG nova.virt.hardware [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 567.903392] env[67015]: DEBUG nova.virt.hardware [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 567.903739] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4b49c0-e324-4912-b29c-1e6a5c27f41c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.912850] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-319b58bb-5925-47a2-af8c-53f9e7f2fd91 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.939294] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114360, 'name': CreateVM_Task, 'duration_secs': 0.769263} completed successfully. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 567.939294] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 567.939294] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.939414] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.939678] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 567.940873] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bea37277-4f85-42d9-b93b-775b94f3f9fc {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.945082] env[67015]: DEBUG oslo_vmware.api [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Waiting for the task: (returnval){ [ 567.945082] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52c65a07-c1ab-8e3d-645c-fb4cb3b39bcf" [ 567.945082] env[67015]: _type = "Task" [ 567.945082] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 567.952930] env[67015]: DEBUG oslo_vmware.api [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52c65a07-c1ab-8e3d-645c-fb4cb3b39bcf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.014959] env[67015]: DEBUG nova.network.neutron [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Instance cache missing network info. 
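The `[datastore2] devstack-image-cache_base/8cdbc1be-...` locks being taken above (followed by a SearchDatastore_Task) are the image-cache serialization in `_fetch_image_if_missing`: every build using the same Glance image contends on the cached-VMDK path, and only the first downloads it. A sketch of that shape, with all four callables as hypothetical stand-ins for nova's helpers:

```python
def ensure_image_cached(lock, datastore_exists, fetch_image, cache_path):
    """Populate the datastore image cache exactly once per image.

    lock(name) is a named-lock context manager; datastore_exists maps to
    the SearchDatastore_Task probe; fetch_image downloads from Glance.
    """
    with lock(cache_path):
        if not datastore_exists(cache_path):   # SearchDatastore_Task
            fetch_image(cache_path)            # first builder pays the cost
    return cache_path
```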
{{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 568.081099] env[67015]: DEBUG nova.policy [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8b5ca6f9436244a5a9d70a56ab576815', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '616faf5c4d5244e89a15f2d5c8e91dc0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 568.100693] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114366, 'name': CreateVM_Task, 'duration_secs': 0.349113} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 568.101037] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 568.128426] env[67015]: DEBUG oslo_concurrency.lockutils [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.153490] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114363, 'name': CreateVM_Task, 'duration_secs': 0.648743} completed successfully. 
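The `Policy check for network:attach_external_network failed` entries are expected in this run: the tempest credentials carry only the `member` and `reader` roles, and that rule is admin-only by default, so Nova simply skips attaching external networks. A minimal oslo.policy reproduction, assuming the default `rule:context_is_admin` check string (nova defines the real rule under nova/policies/):

```python
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.ConfigOpts())
enforcer.register_defaults([
    policy.RuleDefault("context_is_admin", "role:admin"),
    # Assumed default check string for illustration.
    policy.RuleDefault("network:attach_external_network",
                       "rule:context_is_admin"),
])

creds = {"roles": ["member", "reader"],
         "project_id": "616faf5c4d5244e89a15f2d5c8e91dc0"}
print(enforcer.authorize("network:attach_external_network",
                         {}, creds, do_raise=False))   # False -> 'failed'
```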
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 568.154660] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 568.155308] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.172674] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Acquiring lock "98ad3e5c-065d-4561-890c-46d5ca0a8f7f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.172925] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Lock "98ad3e5c-065d-4561-890c-46d5ca0a8f7f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.191945] env[67015]: DEBUG nova.compute.manager [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 568.211729] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquiring lock "781b688b-ec99-4423-99b2-2502c6e8a75d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.211975] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "781b688b-ec99-4423-99b2-2502c6e8a75d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.224431] env[67015]: DEBUG nova.compute.manager [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Starting instance... 
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 568.289867] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.290249] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.291819] env[67015]: INFO nova.compute.claims [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 568.295400] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.432326] env[67015]: DEBUG nova.network.neutron [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Updating instance_info_cache with network_info: [{"id": "bddcd632-a286-4f4c-a565-aebaaa7dbb59", "address": "fa:16:3e:26:42:02", "network": {"id": "7451a549-058d-44bf-acca-7bd945ac5def", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.94", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "482f98aba88c4103b6a8d7c7ab5d030d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbddcd632-a2", "ovs_interfaceid": "bddcd632-a286-4f4c-a565-aebaaa7dbb59", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.457154] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
568.458152] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 568.458152] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.459858] env[67015]: DEBUG oslo_concurrency.lockutils [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.459858] env[67015]: DEBUG oslo_concurrency.lockutils [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 568.459977] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Releasing lock "refresh_cache-3eb80b23-a4a3-43e6-9620-86bf1eb344f2" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.462897] env[67015]: DEBUG nova.compute.manager [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Instance network_info: |[{"id": "bddcd632-a286-4f4c-a565-aebaaa7dbb59", "address": "fa:16:3e:26:42:02", "network": {"id": "7451a549-058d-44bf-acca-7bd945ac5def", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.94", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "482f98aba88c4103b6a8d7c7ab5d030d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbddcd632-a2", "ovs_interfaceid": "bddcd632-a286-4f4c-a565-aebaaa7dbb59", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 568.462897] env[67015]: DEBUG 
oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-487cbd8c-aef9-40a4-94d5-e538318006ac {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.465169] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:42:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f0ef5aba-bd9a-42ff-a1a0-5e763986d70a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bddcd632-a286-4f4c-a565-aebaaa7dbb59', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 568.474548] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Creating folder: Project (63c0561098d0449faf0c374dec938510). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 568.478672] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ca1932a8-ebb1-41a0-9414-98781ccd1b05 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.485031] env[67015]: DEBUG oslo_vmware.api [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Waiting for the task: (returnval){ [ 568.485031] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52495e53-1724-1bff-06f5-52413b2643df" [ 568.485031] env[67015]: _type = "Task" [ 568.485031] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.490887] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Created folder: Project (63c0561098d0449faf0c374dec938510) in parent group-v623108. [ 568.491089] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Creating folder: Instances. Parent ref: group-v623121. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 568.491752] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2c60bf6e-8199-4314-aa91-e2011886cf3d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.497979] env[67015]: DEBUG oslo_vmware.api [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52495e53-1724-1bff-06f5-52413b2643df, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.510242] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Created folder: Instances in parent group-v623121. [ 568.510530] env[67015]: DEBUG oslo.service.loopingcall [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 568.511132] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 568.512750] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1e8cc6b6-c491-4972-a2c1-b39a2c8552d7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.545518] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 568.545518] env[67015]: value = "task-3114369" [ 568.545518] env[67015]: _type = "Task" [ 568.545518] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.557259] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114369, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.596928] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-028eeb12-1a83-4711-bf90-cfc6e20d357c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.608975] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d201d551-e843-4391-b2c6-81a861c71e49 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.647718] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4900bd38-4e03-42c3-b618-3c07a1bbb595 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.659510] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c3a820-805f-457f-abd8-7d1433d69c21 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.674683] env[67015]: DEBUG nova.compute.provider_tree [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 568.686965] env[67015]: DEBUG nova.scheduler.client.report [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 
1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 568.706126] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.416s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 568.706714] env[67015]: DEBUG nova.compute.manager [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 568.709507] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.414s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.714177] env[67015]: INFO nova.compute.claims [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 568.758213] env[67015]: DEBUG nova.compute.utils [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 568.761815] env[67015]: DEBUG nova.compute.manager [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 568.762133] env[67015]: DEBUG nova.network.neutron [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 568.769958] env[67015]: DEBUG nova.compute.manager [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Start building block device mappings for instance. 
{{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 568.867224] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Acquiring lock "0734f630-dea5-4ee0-b890-dd50f3e8b178" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.867410] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Lock "0734f630-dea5-4ee0-b890-dd50f3e8b178" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.869794] env[67015]: DEBUG nova.compute.manager [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Start spawning the instance on the hypervisor. {{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 568.906372] env[67015]: DEBUG nova.virt.hardware [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=<?>,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-20T08:16:53Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 568.906648] env[67015]: DEBUG nova.virt.hardware [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 568.906823] env[67015]: DEBUG nova.virt.hardware [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 568.906963] env[67015]: DEBUG nova.virt.hardware [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 568.907133] env[67015]: DEBUG nova.virt.hardware [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 
tempest-MigrationsAdminTest-1264630040-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 568.907284] env[67015]: DEBUG nova.virt.hardware [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 568.907495] env[67015]: DEBUG nova.virt.hardware [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 568.907656] env[67015]: DEBUG nova.virt.hardware [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 568.907823] env[67015]: DEBUG nova.virt.hardware [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 568.907985] env[67015]: DEBUG nova.virt.hardware [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 568.908341] env[67015]: DEBUG nova.virt.hardware [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 568.910064] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df74b13b-807a-4247-be40-2139f9284650 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.922575] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea29aade-f576-4bcd-8e12-d47584198b9e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.958143] env[67015]: DEBUG nova.policy [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3eed03a901424b27ac2d46eba439bac1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cf303c3648a940c1a5384db1398b7995', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 568.978360] 
env[67015]: DEBUG nova.network.neutron [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Successfully created port: 57dd67b8-9e16-463e-a1ef-065b4041e800 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 569.004055] env[67015]: DEBUG oslo_concurrency.lockutils [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.004055] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 569.004055] env[67015]: DEBUG oslo_concurrency.lockutils [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.004055] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.004379] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 569.004379] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e7ac712-2a90-4a2f-bc21-3f469d91ec3d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.007650] env[67015]: DEBUG oslo_vmware.api [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Waiting for the task: (returnval){ [ 569.007650] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52b186df-bfc0-89ec-9671-1f2e98127c07" [ 569.007650] env[67015]: _type = "Task" [ 569.007650] env[67015]: } to complete. 
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 569.013436] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acebe414-20ee-46e4-aae7-f9016edccbf2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.023850] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.024402] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 569.024402] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.025319] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de33942b-3bca-40cc-9835-8a63df835176 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.064153] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e92d59-9ecc-45e2-983a-c154a454e14b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.075549] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3e37b6b-d04d-4a4b-8aeb-e7f39d6fdd2a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.079294] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114369, 'name': CreateVM_Task, 'duration_secs': 0.360767} completed successfully. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 569.081327] env[67015]: DEBUG nova.network.neutron [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Successfully created port: b9e84b9e-ef3c-4953-8f4b-101468d6b95f {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 569.082188] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 569.085365] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.085365] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.085365] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 569.092085] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-799ad189-3125-409c-ad06-491af49c5cff {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.094253] env[67015]: DEBUG nova.compute.provider_tree [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 569.098454] env[67015]: DEBUG oslo_vmware.api [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Waiting for the task: (returnval){ [ 569.098454] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52c0b139-f27b-887a-13d3-3222fde471a9" [ 569.098454] env[67015]: _type = "Task" [ 569.098454] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 569.106689] env[67015]: DEBUG oslo_vmware.api [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52c0b139-f27b-887a-13d3-3222fde471a9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.108330] env[67015]: DEBUG nova.scheduler.client.report [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 569.131473] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.422s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.132038] env[67015]: DEBUG nova.compute.manager [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 569.191057] env[67015]: DEBUG nova.compute.utils [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 569.191057] env[67015]: DEBUG nova.compute.manager [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 569.191057] env[67015]: DEBUG nova.network.neutron [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 569.198545] env[67015]: DEBUG nova.network.neutron [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Successfully updated port: 7e886dc2-6df7-4d0e-957c-f5e018a4c239 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 569.201033] env[67015]: DEBUG nova.compute.manager [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Start building block device mappings for instance. 
{{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 569.212384] env[67015]: DEBUG oslo_concurrency.lockutils [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Acquiring lock "refresh_cache-1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.212384] env[67015]: DEBUG oslo_concurrency.lockutils [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Acquired lock "refresh_cache-1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.212384] env[67015]: DEBUG nova.network.neutron [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 569.291259] env[67015]: DEBUG nova.compute.manager [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Start spawning the instance on the hypervisor. {{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 569.322221] env[67015]: DEBUG nova.virt.hardware [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=<?>,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-20T08:16:53Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 569.322501] env[67015]: DEBUG nova.virt.hardware [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 569.322809] env[67015]: DEBUG nova.virt.hardware [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 569.322894] env[67015]: DEBUG nova.virt.hardware [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 569.322987] env[67015]: DEBUG nova.virt.hardware [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 569.327593] env[67015]: DEBUG nova.virt.hardware [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 569.327995] env[67015]: DEBUG nova.virt.hardware [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 569.328135] env[67015]: DEBUG nova.virt.hardware [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 569.328344] env[67015]: DEBUG nova.virt.hardware [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 569.329025] env[67015]: DEBUG nova.virt.hardware [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 569.329025] env[67015]: DEBUG nova.virt.hardware [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 569.329911] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c59d6dca-618c-4f60-af91-52714bf33a00 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.341446] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e9ac67b-a913-4630-98c2-ecea4195b47e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.369807] env[67015]: DEBUG nova.network.neutron [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Instance cache missing network info. 
{{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 569.398728] env[67015]: DEBUG nova.policy [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '585aaf69bf474881ba8f6f13b7fba1b2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '54ce93521ed148d397c6dd8905557b34', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 569.575739] env[67015]: DEBUG nova.compute.manager [req-39d6d85c-d43d-413f-9881-eb370332e2c4 req-0819e75d-7e04-4357-85e9-cb45f003dcbd service nova] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Received event network-vif-plugged-ad607619-3a16-4045-956a-b5a81b98e301 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 569.575958] env[67015]: DEBUG oslo_concurrency.lockutils [req-39d6d85c-d43d-413f-9881-eb370332e2c4 req-0819e75d-7e04-4357-85e9-cb45f003dcbd service nova] Acquiring lock "860124dd-da7e-4beb-832f-7a9ab9580aed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.576254] env[67015]: DEBUG oslo_concurrency.lockutils [req-39d6d85c-d43d-413f-9881-eb370332e2c4 req-0819e75d-7e04-4357-85e9-cb45f003dcbd service nova] Lock "860124dd-da7e-4beb-832f-7a9ab9580aed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.576426] env[67015]: DEBUG oslo_concurrency.lockutils [req-39d6d85c-d43d-413f-9881-eb370332e2c4 req-0819e75d-7e04-4357-85e9-cb45f003dcbd service nova] Lock "860124dd-da7e-4beb-832f-7a9ab9580aed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.576591] env[67015]: DEBUG nova.compute.manager [req-39d6d85c-d43d-413f-9881-eb370332e2c4 req-0819e75d-7e04-4357-85e9-cb45f003dcbd service nova] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] No waiting events found dispatching network-vif-plugged-ad607619-3a16-4045-956a-b5a81b98e301 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 569.576751] env[67015]: WARNING nova.compute.manager [req-39d6d85c-d43d-413f-9881-eb370332e2c4 req-0819e75d-7e04-4357-85e9-cb45f003dcbd service nova] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Received unexpected event network-vif-plugged-ad607619-3a16-4045-956a-b5a81b98e301 for instance with vm_state building and task_state spawning. 
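
The entries just above show Nova's external-event plumbing end to end: Neutron reports network-vif-plugged, the handler serializes on the per-instance "<uuid>-events" lock, tries to pop a registered waiter for that event, finds none ("No waiting events found"), and falls through to the WARNING about an unexpected event because the instance is still building/spawning. The following is a minimal, hypothetical sketch of that waiter/dispatch pattern; the class and method names are invented for illustration and only approximate what nova.compute.manager.InstanceEvents actually does.

    import threading

    class EventDispatcher:
        """Toy version of the pop_instance_event pattern in the log above."""

        def __init__(self):
            # Plays the role of the "<uuid>-events" lock in the log.
            self._lock = threading.Lock()
            self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

        def prepare_for_event(self, uuid, event_name):
            """A spawning thread registers interest, then blocks on .wait()."""
            waiter = threading.Event()
            with self._lock:
                self._waiters[(uuid, event_name)] = waiter
            return waiter

        def dispatch(self, uuid, event_name):
            """The external-event handler pops the waiter and signals it."""
            with self._lock:  # acquired/released in ~0.000s, as logged
                waiter = self._waiters.pop((uuid, event_name), None)
            if waiter is None:
                # The "Received unexpected event ..." WARNING branch.
                print(f"WARNING: unexpected event {event_name} for {uuid}")
                return
            waiter.set()  # unblocks whoever waited for e.g. network-vif-plugged

In the log, neither instance had registered a waiter for its network-vif-plugged event yet, so the dispatcher took the warning branch for both.
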
[ 569.611253] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.611427] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 569.611636] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.614526] env[67015]: DEBUG nova.compute.manager [req-a29f8a2c-4975-4e1c-9446-bc1ea7d5ec50 req-a66d96ab-f627-414d-bd34-f8f58671a8ca service nova] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Received event network-vif-plugged-df79b2fb-eefd-41c6-81b0-1f46ebe4dcf2 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 569.614777] env[67015]: DEBUG oslo_concurrency.lockutils [req-a29f8a2c-4975-4e1c-9446-bc1ea7d5ec50 req-a66d96ab-f627-414d-bd34-f8f58671a8ca service nova] Acquiring lock "bf738778-771f-4a1a-b83e-d786c67dafc0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.614915] env[67015]: DEBUG oslo_concurrency.lockutils [req-a29f8a2c-4975-4e1c-9446-bc1ea7d5ec50 req-a66d96ab-f627-414d-bd34-f8f58671a8ca service nova] Lock "bf738778-771f-4a1a-b83e-d786c67dafc0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.615093] env[67015]: DEBUG oslo_concurrency.lockutils [req-a29f8a2c-4975-4e1c-9446-bc1ea7d5ec50 req-a66d96ab-f627-414d-bd34-f8f58671a8ca service nova] Lock "bf738778-771f-4a1a-b83e-d786c67dafc0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.615261] env[67015]: DEBUG nova.compute.manager [req-a29f8a2c-4975-4e1c-9446-bc1ea7d5ec50 req-a66d96ab-f627-414d-bd34-f8f58671a8ca service nova] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] No waiting events found dispatching network-vif-plugged-df79b2fb-eefd-41c6-81b0-1f46ebe4dcf2 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 569.615425] env[67015]: WARNING nova.compute.manager [req-a29f8a2c-4975-4e1c-9446-bc1ea7d5ec50 req-a66d96ab-f627-414d-bd34-f8f58671a8ca service nova] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Received unexpected event network-vif-plugged-df79b2fb-eefd-41c6-81b0-1f46ebe4dcf2 for instance with vm_state building and task_state spawning. 
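
The Releasing/Acquiring pair at the start of that block is the other locking idiom that dominates this section: every build that needs image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 first serializes on a lock named after its devstack-image-cache_base path, then decides whether the cached VMDK must be fetched (_fetch_image_if_missing). A rough sketch of the idiom, assuming oslo.concurrency's lockutils.lock() context manager and a hypothetical fetch_image() downloader; the existence check is a stand-in for the SearchDatastore_Task calls seen above.

    import os
    from oslo_concurrency import lockutils

    CACHE_ROOT = "[datastore2] devstack-image-cache_base"  # datastore path from the log

    def ensure_image_cached(image_id, fetch_image):
        """Serialize per image id: concurrent builds of the same image queue
        on one lock (the 'waited N.NNNs' log lines) while the first caller
        downloads; later callers find the cached VMDK already present."""
        cache_path = f"{CACHE_ROOT}/{image_id}/{image_id}.vmdk"
        # lockutils emits the Acquiring/Acquired/Releasing lines seen above.
        with lockutils.lock(cache_path):
            if not os.path.exists(cache_path):      # stand-in for a datastore search
                fetch_image(image_id, cache_path)   # hypothetical downloader
        return cache_path

Keying the lock on the datastore path is what makes the timing lines meaningful: builds of the same image wait on each other, while builds of different images proceed in parallel.
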
[ 570.064301] env[67015]: DEBUG nova.network.neutron [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Updating instance_info_cache with network_info: [{"id": "7e886dc2-6df7-4d0e-957c-f5e018a4c239", "address": "fa:16:3e:78:6e:a2", "network": {"id": "86766f70-0b24-4064-8c4e-bc973f5d7d05", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-127923232-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba7a189cc3084e6db0d0a432140bf819", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ebd8af-aaf6-4d04-b869-3882e2571ed7", "external-id": "nsx-vlan-transportzone-541", "segmentation_id": 541, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e886dc2-6d", "ovs_interfaceid": "7e886dc2-6df7-4d0e-957c-f5e018a4c239", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.078769] env[67015]: DEBUG oslo_concurrency.lockutils [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Releasing lock "refresh_cache-1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 570.079171] env[67015]: DEBUG nova.compute.manager [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Instance network_info: |[{"id": "7e886dc2-6df7-4d0e-957c-f5e018a4c239", "address": "fa:16:3e:78:6e:a2", "network": {"id": "86766f70-0b24-4064-8c4e-bc973f5d7d05", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-127923232-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba7a189cc3084e6db0d0a432140bf819", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ebd8af-aaf6-4d04-b869-3882e2571ed7", "external-id": "nsx-vlan-transportzone-541", "segmentation_id": 541, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e886dc2-6d", "ovs_interfaceid": "7e886dc2-6df7-4d0e-957c-f5e018a4c239", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 570.080099] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:6e:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '04ebd8af-aaf6-4d04-b869-3882e2571ed7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e886dc2-6df7-4d0e-957c-f5e018a4c239', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 570.089049] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Creating folder: Project (ba7a189cc3084e6db0d0a432140bf819). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 570.089710] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48f9ecce-6344-4ebb-bc14-f383d9d6af70 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.101930] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Created folder: Project (ba7a189cc3084e6db0d0a432140bf819) in parent group-v623108. [ 570.102059] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Creating folder: Instances. Parent ref: group-v623124. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 570.102960] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1994ea1d-bb66-403b-8bad-61ec9ab6ca7c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.113859] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Created folder: Instances in parent group-v623124. [ 570.114291] env[67015]: DEBUG oslo.service.loopingcall [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 570.114544] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 570.114810] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ffc4617e-58ac-44ba-a028-bd869d7bf0d1 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.140319] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 570.140319] env[67015]: value = "task-3114372" [ 570.140319] env[67015]: _type = "Task" [ 570.140319] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.148615] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114372, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.330706] env[67015]: DEBUG nova.network.neutron [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Successfully created port: 20bc7c7a-b4c5-4293-abae-bd1073a8e6b0 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 570.484094] env[67015]: DEBUG nova.network.neutron [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Successfully created port: 585fab4a-2755-4e09-9baa-d847e07dea7e {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 570.653630] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114372, 'name': CreateVM_Task, 'duration_secs': 0.335073} completed successfully. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 570.653831] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 570.654511] env[67015]: DEBUG oslo_concurrency.lockutils [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.654676] env[67015]: DEBUG oslo_concurrency.lockutils [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 570.654991] env[67015]: DEBUG oslo_concurrency.lockutils [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 570.655252] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fbc71d1-1e80-409f-a2b5-169695604714 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.663561] env[67015]: DEBUG oslo_vmware.api [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Waiting for the task: (returnval){ [ 570.663561] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52be2b8c-2c15-cc00-2f24-813162bc5763" [ 570.663561] env[67015]: _type = "Task" [ 570.663561] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.672485] env[67015]: DEBUG oslo_vmware.api [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52be2b8c-2c15-cc00-2f24-813162bc5763, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.760673] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._sync_power_states {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 570.790015] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Getting list of instances from cluster (obj){ [ 570.790015] env[67015]: value = "domain-c8" [ 570.790015] env[67015]: _type = "ClusterComputeResource" [ 570.790015] env[67015]: } {{(pid=67015) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 570.791324] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837f3d16-571d-44aa-a9a6-83a0c7e5ce13 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.805018] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Got total of 6 instances {{(pid=67015) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 570.805723] env[67015]: WARNING nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] While synchronizing instance power states, found 10 instances in the database and 6 instances on the hypervisor. [ 570.805723] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 860124dd-da7e-4beb-832f-7a9ab9580aed {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 570.805723] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 96efb9a4-3c83-4dea-94f2-d93dead6a2ca {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 570.805723] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid bf738778-771f-4a1a-b83e-d786c67dafc0 {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 570.805908] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 3eb80b23-a4a3-43e6-9620-86bf1eb344f2 {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 570.805962] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93 {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 570.806082] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 994ad7c4-4a41-49b1-98b8-efd2f2b134e7 {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 570.806237] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid e0ac77e1-fb77-4b97-bacc-838cc3e16bbc {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 570.806383] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 13e28171-8074-4660-91cf-f6d569414fc6 {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 570.806532] env[67015]: DEBUG 
nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 98ad3e5c-065d-4561-890c-46d5ca0a8f7f {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 570.806675] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 781b688b-ec99-4423-99b2-2502c6e8a75d {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 570.806985] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "860124dd-da7e-4beb-832f-7a9ab9580aed" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.807240] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "96efb9a4-3c83-4dea-94f2-d93dead6a2ca" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.807441] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "bf738778-771f-4a1a-b83e-d786c67dafc0" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.807635] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "3eb80b23-a4a3-43e6-9620-86bf1eb344f2" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.807825] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.808027] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "994ad7c4-4a41-49b1-98b8-efd2f2b134e7" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.812259] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "e0ac77e1-fb77-4b97-bacc-838cc3e16bbc" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.812259] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "13e28171-8074-4660-91cf-f6d569414fc6" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.812259] env[67015]: DEBUG 
oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "98ad3e5c-065d-4561-890c-46d5ca0a8f7f" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.812259] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "781b688b-ec99-4423-99b2-2502c6e8a75d" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.812555] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 570.812555] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Getting list of instances from cluster (obj){ [ 570.812555] env[67015]: value = "domain-c8" [ 570.812555] env[67015]: _type = "ClusterComputeResource" [ 570.812555] env[67015]: } {{(pid=67015) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 570.815159] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea449a4e-6e05-482c-8195-7554f48cb994 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.835263] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Got total of 6 instances {{(pid=67015) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 570.959986] env[67015]: DEBUG nova.network.neutron [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Successfully updated port: 57dd67b8-9e16-463e-a1ef-065b4041e800 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 570.978051] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquiring lock "refresh_cache-e0ac77e1-fb77-4b97-bacc-838cc3e16bbc" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.978051] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquired lock "refresh_cache-e0ac77e1-fb77-4b97-bacc-838cc3e16bbc" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 570.978051] env[67015]: DEBUG nova.network.neutron [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 571.069186] env[67015]: DEBUG nova.network.neutron [None req-d0d877da-766b-4a7b-8f26-c6f674196239 
tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 571.176377] env[67015]: DEBUG oslo_concurrency.lockutils [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.176691] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 571.176831] env[67015]: DEBUG oslo_concurrency.lockutils [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.243779] env[67015]: DEBUG nova.network.neutron [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Successfully updated port: b9e84b9e-ef3c-4953-8f4b-101468d6b95f {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 571.263263] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Acquiring lock "refresh_cache-13e28171-8074-4660-91cf-f6d569414fc6" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.263337] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Acquired lock "refresh_cache-13e28171-8074-4660-91cf-f6d569414fc6" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.263734] env[67015]: DEBUG nova.network.neutron [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 571.330133] env[67015]: DEBUG nova.network.neutron [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Instance cache missing network info. 
{{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 571.365796] env[67015]: DEBUG nova.network.neutron [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Updating instance_info_cache with network_info: [{"id": "57dd67b8-9e16-463e-a1ef-065b4041e800", "address": "fa:16:3e:bb:c3:46", "network": {"id": "7451a549-058d-44bf-acca-7bd945ac5def", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.122", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "482f98aba88c4103b6a8d7c7ab5d030d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57dd67b8-9e", "ovs_interfaceid": "57dd67b8-9e16-463e-a1ef-065b4041e800", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.382135] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Releasing lock "refresh_cache-e0ac77e1-fb77-4b97-bacc-838cc3e16bbc" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.382222] env[67015]: DEBUG nova.compute.manager [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Instance network_info: |[{"id": "57dd67b8-9e16-463e-a1ef-065b4041e800", "address": "fa:16:3e:bb:c3:46", "network": {"id": "7451a549-058d-44bf-acca-7bd945ac5def", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.122", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "482f98aba88c4103b6a8d7c7ab5d030d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57dd67b8-9e", "ovs_interfaceid": "57dd67b8-9e16-463e-a1ef-065b4041e800", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 571.382636] env[67015]: DEBUG 
nova.virt.vmwareapi.vmops [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:c3:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f0ef5aba-bd9a-42ff-a1a0-5e763986d70a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '57dd67b8-9e16-463e-a1ef-065b4041e800', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 571.395998] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Creating folder: Project (58cb53c4e60c4084a1211154a2c2b12a). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 571.395998] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9d73ab05-86d5-4750-86fe-8f1f3f208512 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.408204] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Created folder: Project (58cb53c4e60c4084a1211154a2c2b12a) in parent group-v623108. [ 571.409345] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Creating folder: Instances. Parent ref: group-v623127. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 571.409345] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f6e3c71e-1b64-4d53-b067-f025af81608d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.417998] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Created folder: Instances in parent group-v623127. [ 571.418271] env[67015]: DEBUG oslo.service.loopingcall [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 571.418497] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 571.419352] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c0d631a4-2f87-4668-9dad-c70169312459 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.444751] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 571.444751] env[67015]: value = "task-3114375" [ 571.444751] env[67015]: _type = "Task" [ 571.444751] env[67015]: } to complete. 
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.452988] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114375, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.750649] env[67015]: DEBUG nova.network.neutron [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Updating instance_info_cache with network_info: [{"id": "b9e84b9e-ef3c-4953-8f4b-101468d6b95f", "address": "fa:16:3e:70:21:c7", "network": {"id": "7451a549-058d-44bf-acca-7bd945ac5def", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "482f98aba88c4103b6a8d7c7ab5d030d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9e84b9e-ef", "ovs_interfaceid": "b9e84b9e-ef3c-4953-8f4b-101468d6b95f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.773028] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Releasing lock "refresh_cache-13e28171-8074-4660-91cf-f6d569414fc6" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.773028] env[67015]: DEBUG nova.compute.manager [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Instance network_info: |[{"id": "b9e84b9e-ef3c-4953-8f4b-101468d6b95f", "address": "fa:16:3e:70:21:c7", "network": {"id": "7451a549-058d-44bf-acca-7bd945ac5def", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "482f98aba88c4103b6a8d7c7ab5d030d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9e84b9e-ef", "ovs_interfaceid": "b9e84b9e-ef3c-4953-8f4b-101468d6b95f", "qbh_params": null, "qbg_params": 
null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 571.773353] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:21:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f0ef5aba-bd9a-42ff-a1a0-5e763986d70a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b9e84b9e-ef3c-4953-8f4b-101468d6b95f', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 571.783219] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Creating folder: Project (616faf5c4d5244e89a15f2d5c8e91dc0). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 571.787021] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-88947dfb-2f21-49aa-9bc4-fac0b4ca588a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.798667] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Created folder: Project (616faf5c4d5244e89a15f2d5c8e91dc0) in parent group-v623108. [ 571.798863] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Creating folder: Instances. Parent ref: group-v623130. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 571.799322] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a0ae084-0ec2-4398-bc2d-8a06cc1c9e76 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.808908] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Created folder: Instances in parent group-v623130. [ 571.809372] env[67015]: DEBUG oslo.service.loopingcall [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 571.809449] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 571.810400] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-600bde37-4217-466a-befb-cd50debf8877 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.832961] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 571.832961] env[67015]: value = "task-3114378" [ 571.832961] env[67015]: _type = "Task" [ 571.832961] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.841992] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114378, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.958981] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114375, 'name': CreateVM_Task, 'duration_secs': 0.341392} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 571.959172] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 571.959859] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.960146] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.960339] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 571.960593] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fbc9825-b209-47f0-8353-ffcb2e2de7dc {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.965405] env[67015]: DEBUG oslo_vmware.api [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Waiting for the task: (returnval){ [ 571.965405] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52273b59-5517-bc2e-64c3-20527ce08e58" [ 571.965405] env[67015]: _type = "Task" [ 571.965405] env[67015]: } to complete. 
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.973329] env[67015]: DEBUG oslo_vmware.api [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52273b59-5517-bc2e-64c3-20527ce08e58, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.048477] env[67015]: DEBUG nova.network.neutron [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Successfully updated port: 20bc7c7a-b4c5-4293-abae-bd1073a8e6b0 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 572.059072] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Acquiring lock "refresh_cache-98ad3e5c-065d-4561-890c-46d5ca0a8f7f" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.059220] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Acquired lock "refresh_cache-98ad3e5c-065d-4561-890c-46d5ca0a8f7f" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.059368] env[67015]: DEBUG nova.network.neutron [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 572.103997] env[67015]: DEBUG nova.network.neutron [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Successfully updated port: 585fab4a-2755-4e09-9baa-d847e07dea7e {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 572.118666] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquiring lock "refresh_cache-781b688b-ec99-4423-99b2-2502c6e8a75d" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.119374] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquired lock "refresh_cache-781b688b-ec99-4423-99b2-2502c6e8a75d" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.119374] env[67015]: DEBUG nova.network.neutron [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 572.137076] env[67015]: DEBUG nova.network.neutron [None 
req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 572.212612] env[67015]: DEBUG nova.network.neutron [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 572.354978] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114378, 'name': CreateVM_Task, 'duration_secs': 0.395557} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.356296] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 572.357130] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.480255] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.480255] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 572.480255] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.480255] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.480424] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" 
{{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 572.480424] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05b6029f-09b4-427d-878c-0c263bafe076 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.484691] env[67015]: DEBUG oslo_vmware.api [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Waiting for the task: (returnval){ [ 572.484691] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]527619c8-3bca-60cb-39e9-329be81f0f94" [ 572.484691] env[67015]: _type = "Task" [ 572.484691] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.495496] env[67015]: DEBUG oslo_vmware.api [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]527619c8-3bca-60cb-39e9-329be81f0f94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.572250] env[67015]: DEBUG nova.compute.manager [req-85b4732d-e823-4548-816c-c007fa8f02aa req-dd4e2114-714f-4ccc-95d1-59eff3a2e4e8 service nova] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Received event network-changed-ad607619-3a16-4045-956a-b5a81b98e301 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 572.574526] env[67015]: DEBUG nova.compute.manager [req-85b4732d-e823-4548-816c-c007fa8f02aa req-dd4e2114-714f-4ccc-95d1-59eff3a2e4e8 service nova] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Refreshing instance network info cache due to event network-changed-ad607619-3a16-4045-956a-b5a81b98e301. 
{{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 572.574526] env[67015]: DEBUG oslo_concurrency.lockutils [req-85b4732d-e823-4548-816c-c007fa8f02aa req-dd4e2114-714f-4ccc-95d1-59eff3a2e4e8 service nova] Acquiring lock "refresh_cache-860124dd-da7e-4beb-832f-7a9ab9580aed" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.574526] env[67015]: DEBUG oslo_concurrency.lockutils [req-85b4732d-e823-4548-816c-c007fa8f02aa req-dd4e2114-714f-4ccc-95d1-59eff3a2e4e8 service nova] Acquired lock "refresh_cache-860124dd-da7e-4beb-832f-7a9ab9580aed" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.574526] env[67015]: DEBUG nova.network.neutron [req-85b4732d-e823-4548-816c-c007fa8f02aa req-dd4e2114-714f-4ccc-95d1-59eff3a2e4e8 service nova] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Refreshing network info cache for port ad607619-3a16-4045-956a-b5a81b98e301 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 572.617869] env[67015]: DEBUG nova.compute.manager [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Received event network-changed-df79b2fb-eefd-41c6-81b0-1f46ebe4dcf2 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 572.617869] env[67015]: DEBUG nova.compute.manager [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Refreshing instance network info cache due to event network-changed-df79b2fb-eefd-41c6-81b0-1f46ebe4dcf2. {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 572.618018] env[67015]: DEBUG oslo_concurrency.lockutils [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] Acquiring lock "refresh_cache-bf738778-771f-4a1a-b83e-d786c67dafc0" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.618707] env[67015]: DEBUG oslo_concurrency.lockutils [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] Acquired lock "refresh_cache-bf738778-771f-4a1a-b83e-d786c67dafc0" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.618707] env[67015]: DEBUG nova.network.neutron [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Refreshing network info cache for port df79b2fb-eefd-41c6-81b0-1f46ebe4dcf2 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 572.683653] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Acquiring lock "aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.684294] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Lock 
"aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.705163] env[67015]: DEBUG nova.network.neutron [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Updating instance_info_cache with network_info: [{"id": "20bc7c7a-b4c5-4293-abae-bd1073a8e6b0", "address": "fa:16:3e:40:20:e0", "network": {"id": "7451a549-058d-44bf-acca-7bd945ac5def", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.40", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "482f98aba88c4103b6a8d7c7ab5d030d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20bc7c7a-b4", "ovs_interfaceid": "20bc7c7a-b4c5-4293-abae-bd1073a8e6b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.717071] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Releasing lock "refresh_cache-98ad3e5c-065d-4561-890c-46d5ca0a8f7f" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.717384] env[67015]: DEBUG nova.compute.manager [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Instance network_info: |[{"id": "20bc7c7a-b4c5-4293-abae-bd1073a8e6b0", "address": "fa:16:3e:40:20:e0", "network": {"id": "7451a549-058d-44bf-acca-7bd945ac5def", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.40", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "482f98aba88c4103b6a8d7c7ab5d030d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20bc7c7a-b4", "ovs_interfaceid": "20bc7c7a-b4c5-4293-abae-bd1073a8e6b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 572.717786] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:20:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f0ef5aba-bd9a-42ff-a1a0-5e763986d70a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '20bc7c7a-b4c5-4293-abae-bd1073a8e6b0', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 572.726482] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Creating folder: Project (cf303c3648a940c1a5384db1398b7995). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 572.727152] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-993ef5ff-5c1e-4225-8968-cf7f315c4be3 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.738818] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Created folder: Project (cf303c3648a940c1a5384db1398b7995) in parent group-v623108. [ 572.739156] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Creating folder: Instances. Parent ref: group-v623133. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 572.739442] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-67c181bb-2a02-4713-a5d5-430ffd6936a9 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.749991] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Created folder: Instances in parent group-v623133. [ 572.750333] env[67015]: DEBUG oslo.service.loopingcall [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 572.750583] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 572.750843] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-167bfdfe-4191-4a09-98df-c3a38cf8e391 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.767325] env[67015]: DEBUG nova.network.neutron [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Updating instance_info_cache with network_info: [{"id": "585fab4a-2755-4e09-9baa-d847e07dea7e", "address": "fa:16:3e:ea:1d:68", "network": {"id": "a883b9e1-0130-4802-81b3-5c676e3f49c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-4227508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54ce93521ed148d397c6dd8905557b34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap585fab4a-27", "ovs_interfaceid": "585fab4a-2755-4e09-9baa-d847e07dea7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.774785] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 572.774785] env[67015]: value = "task-3114381" [ 572.774785] env[67015]: _type = "Task" [ 572.774785] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.784031] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114381, 'name': CreateVM_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.785594] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Releasing lock "refresh_cache-781b688b-ec99-4423-99b2-2502c6e8a75d" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.785970] env[67015]: DEBUG nova.compute.manager [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Instance network_info: |[{"id": "585fab4a-2755-4e09-9baa-d847e07dea7e", "address": "fa:16:3e:ea:1d:68", "network": {"id": "a883b9e1-0130-4802-81b3-5c676e3f49c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-4227508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54ce93521ed148d397c6dd8905557b34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap585fab4a-27", "ovs_interfaceid": "585fab4a-2755-4e09-9baa-d847e07dea7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 572.786443] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:1d:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b98c49ac-0eb7-4311-aa8f-60581b2ce706', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '585fab4a-2755-4e09-9baa-d847e07dea7e', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 572.794905] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Creating folder: Project (54ce93521ed148d397c6dd8905557b34). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 572.795660] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-db589636-681b-47c0-964f-6c9326884de3 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.814071] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Created folder: Project (54ce93521ed148d397c6dd8905557b34) in parent group-v623108. 
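The spawns interleaved above (1bafb1b5, e0ac77e1, 13e28171, 98ad3e5c, and now 781b688b) all follow the same driver path: build the VIF info, create the per-project and Instances folders, invoke Folder.CreateVM_Task, poll the returned task until vCenter reports completion, then serialize on the "[datastore2] devstack-image-cache_base/<image>" lock before processing the cached VMDK. A minimal sketch of the two mechanics behind those records follows; the get_task_info callable, POLL_INTERVAL, and fetch_image_if_missing are illustrative stand-ins, not Nova's actual wait_for_task/_fetch_image_if_missing code:

```python
# Illustrative sketch only: a flattened version of the two patterns these
# records keep repeating. get_task_info and POLL_INTERVAL are assumed
# stand-ins; the real oslo_vmware code drives _poll_task from an
# oslo.service loopingcall rather than a bare while-loop.
import time

from oslo_concurrency import lockutils

POLL_INTERVAL = 0.5  # assumed spacing between the "progress is 0%" polls


def wait_for_task(get_task_info, task_ref):
    """Poll a vSphere task until it succeeds or errors."""
    while True:
        info = get_task_info(task_ref)      # one PropertyCollector round-trip
        if info['state'] == 'success':      # -> "completed successfully."
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(info.get('error'))
        time.sleep(POLL_INTERVAL)           # -> "progress is 0%." records


# The lock name seen in the log is the lock key itself: every spawn
# serializes on the image UUID, so only one request at a time inspects or
# populates the cached image under devstack-image-cache_base.
@lockutils.synchronized(
    '[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982')
def fetch_image_if_missing():
    pass  # search the datastore; download and convert only on a cache miss
```

The lockutils.synchronized decorator is real oslo.concurrency API; everything else is a simplified stand-in for the code paths the trailers point at (wait_for_task/_poll_task in oslo_vmware/api.py and _fetch_image_if_missing in vmops.py:624), which explains why each image sees the same acquire/search/release sequence per request.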
[ 572.814507] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Creating folder: Instances. Parent ref: group-v623136. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 572.814885] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1d572fc5-8670-4eaf-9165-7ddff1c980c6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.825656] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Created folder: Instances in parent group-v623136. [ 572.826186] env[67015]: DEBUG oslo.service.loopingcall [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 572.826592] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 572.826950] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2517e7cb-a062-4921-9a62-26de74d71e33 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.853198] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 572.853198] env[67015]: value = "task-3114384" [ 572.853198] env[67015]: _type = "Task" [ 572.853198] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.863196] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114384, 'name': CreateVM_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.000718] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.001605] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 573.002124] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.069395] env[67015]: DEBUG nova.network.neutron [req-85b4732d-e823-4548-816c-c007fa8f02aa req-dd4e2114-714f-4ccc-95d1-59eff3a2e4e8 service nova] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Updated VIF entry in instance network info cache for port ad607619-3a16-4045-956a-b5a81b98e301. {{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 573.069395] env[67015]: DEBUG nova.network.neutron [req-85b4732d-e823-4548-816c-c007fa8f02aa req-dd4e2114-714f-4ccc-95d1-59eff3a2e4e8 service nova] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Updating instance_info_cache with network_info: [{"id": "ad607619-3a16-4045-956a-b5a81b98e301", "address": "fa:16:3e:94:3a:d9", "network": {"id": "7451a549-058d-44bf-acca-7bd945ac5def", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.28", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "482f98aba88c4103b6a8d7c7ab5d030d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad607619-3a", "ovs_interfaceid": "ad607619-3a16-4045-956a-b5a81b98e301", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 573.080384] env[67015]: DEBUG oslo_concurrency.lockutils [req-85b4732d-e823-4548-816c-c007fa8f02aa req-dd4e2114-714f-4ccc-95d1-59eff3a2e4e8 service nova] Releasing lock "refresh_cache-860124dd-da7e-4beb-832f-7a9ab9580aed" {{(pid=67015) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.080655] env[67015]: DEBUG nova.compute.manager [req-85b4732d-e823-4548-816c-c007fa8f02aa req-dd4e2114-714f-4ccc-95d1-59eff3a2e4e8 service nova] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Received event network-vif-plugged-57dd67b8-9e16-463e-a1ef-065b4041e800 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 573.080865] env[67015]: DEBUG oslo_concurrency.lockutils [req-85b4732d-e823-4548-816c-c007fa8f02aa req-dd4e2114-714f-4ccc-95d1-59eff3a2e4e8 service nova] Acquiring lock "e0ac77e1-fb77-4b97-bacc-838cc3e16bbc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.081121] env[67015]: DEBUG oslo_concurrency.lockutils [req-85b4732d-e823-4548-816c-c007fa8f02aa req-dd4e2114-714f-4ccc-95d1-59eff3a2e4e8 service nova] Lock "e0ac77e1-fb77-4b97-bacc-838cc3e16bbc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.081300] env[67015]: DEBUG oslo_concurrency.lockutils [req-85b4732d-e823-4548-816c-c007fa8f02aa req-dd4e2114-714f-4ccc-95d1-59eff3a2e4e8 service nova] Lock "e0ac77e1-fb77-4b97-bacc-838cc3e16bbc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.081849] env[67015]: DEBUG nova.compute.manager [req-85b4732d-e823-4548-816c-c007fa8f02aa req-dd4e2114-714f-4ccc-95d1-59eff3a2e4e8 service nova] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] No waiting events found dispatching network-vif-plugged-57dd67b8-9e16-463e-a1ef-065b4041e800 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 573.081849] env[67015]: WARNING nova.compute.manager [req-85b4732d-e823-4548-816c-c007fa8f02aa req-dd4e2114-714f-4ccc-95d1-59eff3a2e4e8 service nova] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Received unexpected event network-vif-plugged-57dd67b8-9e16-463e-a1ef-065b4041e800 for instance with vm_state building and task_state spawning. [ 573.082116] env[67015]: DEBUG nova.compute.manager [req-85b4732d-e823-4548-816c-c007fa8f02aa req-dd4e2114-714f-4ccc-95d1-59eff3a2e4e8 service nova] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Received event network-changed-57dd67b8-9e16-463e-a1ef-065b4041e800 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 573.082116] env[67015]: DEBUG nova.compute.manager [req-85b4732d-e823-4548-816c-c007fa8f02aa req-dd4e2114-714f-4ccc-95d1-59eff3a2e4e8 service nova] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Refreshing instance network info cache due to event network-changed-57dd67b8-9e16-463e-a1ef-065b4041e800. 
{{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 573.082522] env[67015]: DEBUG oslo_concurrency.lockutils [req-85b4732d-e823-4548-816c-c007fa8f02aa req-dd4e2114-714f-4ccc-95d1-59eff3a2e4e8 service nova] Acquiring lock "refresh_cache-e0ac77e1-fb77-4b97-bacc-838cc3e16bbc" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.082522] env[67015]: DEBUG oslo_concurrency.lockutils [req-85b4732d-e823-4548-816c-c007fa8f02aa req-dd4e2114-714f-4ccc-95d1-59eff3a2e4e8 service nova] Acquired lock "refresh_cache-e0ac77e1-fb77-4b97-bacc-838cc3e16bbc" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.082522] env[67015]: DEBUG nova.network.neutron [req-85b4732d-e823-4548-816c-c007fa8f02aa req-dd4e2114-714f-4ccc-95d1-59eff3a2e4e8 service nova] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Refreshing network info cache for port 57dd67b8-9e16-463e-a1ef-065b4041e800 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 573.291503] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114381, 'name': CreateVM_Task, 'duration_secs': 0.329102} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.292019] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 573.292399] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.292670] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.292924] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 573.293189] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a1736ed-2be3-4390-aefb-7564836525f2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.297715] env[67015]: DEBUG oslo_vmware.api [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Waiting for the task: (returnval){ [ 573.297715] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]5202bba5-dedc-2ef4-1af3-771cc8fd248b" [ 573.297715] env[67015]: _type = "Task" [ 573.297715] env[67015]: } to complete. 
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.305211] env[67015]: DEBUG oslo_vmware.api [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]5202bba5-dedc-2ef4-1af3-771cc8fd248b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.366818] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114384, 'name': CreateVM_Task, 'duration_secs': 0.379273} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.366975] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 573.367640] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.469029] env[67015]: DEBUG nova.network.neutron [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Updated VIF entry in instance network info cache for port df79b2fb-eefd-41c6-81b0-1f46ebe4dcf2. {{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 573.469339] env[67015]: DEBUG nova.network.neutron [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Updating instance_info_cache with network_info: [{"id": "df79b2fb-eefd-41c6-81b0-1f46ebe4dcf2", "address": "fa:16:3e:67:8b:5a", "network": {"id": "7451a549-058d-44bf-acca-7bd945ac5def", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.129", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "482f98aba88c4103b6a8d7c7ab5d030d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf79b2fb-ee", "ovs_interfaceid": "df79b2fb-eefd-41c6-81b0-1f46ebe4dcf2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 573.482342] env[67015]: DEBUG oslo_concurrency.lockutils [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] Releasing lock "refresh_cache-bf738778-771f-4a1a-b83e-d786c67dafc0" {{(pid=67015) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.482486] env[67015]: DEBUG nova.compute.manager [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Received event network-vif-plugged-bddcd632-a286-4f4c-a565-aebaaa7dbb59 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 573.483043] env[67015]: DEBUG oslo_concurrency.lockutils [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] Acquiring lock "3eb80b23-a4a3-43e6-9620-86bf1eb344f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.483331] env[67015]: DEBUG oslo_concurrency.lockutils [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] Lock "3eb80b23-a4a3-43e6-9620-86bf1eb344f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.483503] env[67015]: DEBUG oslo_concurrency.lockutils [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] Lock "3eb80b23-a4a3-43e6-9620-86bf1eb344f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.483671] env[67015]: DEBUG nova.compute.manager [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] No waiting events found dispatching network-vif-plugged-bddcd632-a286-4f4c-a565-aebaaa7dbb59 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 573.483848] env[67015]: WARNING nova.compute.manager [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Received unexpected event network-vif-plugged-bddcd632-a286-4f4c-a565-aebaaa7dbb59 for instance with vm_state building and task_state spawning. [ 573.484104] env[67015]: DEBUG nova.compute.manager [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Received event network-changed-bddcd632-a286-4f4c-a565-aebaaa7dbb59 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 573.484180] env[67015]: DEBUG nova.compute.manager [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Refreshing instance network info cache due to event network-changed-bddcd632-a286-4f4c-a565-aebaaa7dbb59. 
{{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 573.484360] env[67015]: DEBUG oslo_concurrency.lockutils [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] Acquiring lock "refresh_cache-3eb80b23-a4a3-43e6-9620-86bf1eb344f2" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.484553] env[67015]: DEBUG oslo_concurrency.lockutils [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] Acquired lock "refresh_cache-3eb80b23-a4a3-43e6-9620-86bf1eb344f2" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.484643] env[67015]: DEBUG nova.network.neutron [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Refreshing network info cache for port bddcd632-a286-4f4c-a565-aebaaa7dbb59 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 573.593469] env[67015]: DEBUG nova.network.neutron [req-85b4732d-e823-4548-816c-c007fa8f02aa req-dd4e2114-714f-4ccc-95d1-59eff3a2e4e8 service nova] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Updated VIF entry in instance network info cache for port 57dd67b8-9e16-463e-a1ef-065b4041e800. {{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 573.594191] env[67015]: DEBUG nova.network.neutron [req-85b4732d-e823-4548-816c-c007fa8f02aa req-dd4e2114-714f-4ccc-95d1-59eff3a2e4e8 service nova] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Updating instance_info_cache with network_info: [{"id": "57dd67b8-9e16-463e-a1ef-065b4041e800", "address": "fa:16:3e:bb:c3:46", "network": {"id": "7451a549-058d-44bf-acca-7bd945ac5def", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.122", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "482f98aba88c4103b6a8d7c7ab5d030d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57dd67b8-9e", "ovs_interfaceid": "57dd67b8-9e16-463e-a1ef-065b4041e800", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 573.605837] env[67015]: DEBUG oslo_concurrency.lockutils [req-85b4732d-e823-4548-816c-c007fa8f02aa req-dd4e2114-714f-4ccc-95d1-59eff3a2e4e8 service nova] Releasing lock "refresh_cache-e0ac77e1-fb77-4b97-bacc-838cc3e16bbc" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.815052] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Releasing lock "[datastore2] 
devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.815422] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 573.815902] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.819673] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.823012] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 573.823012] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0db99a2-d007-49bf-b569-9975707ca826 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.827210] env[67015]: DEBUG oslo_vmware.api [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Waiting for the task: (returnval){ [ 573.827210] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]528a3f02-2ee5-bd67-7b77-cc9831d13a64" [ 573.827210] env[67015]: _type = "Task" [ 573.827210] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.836171] env[67015]: DEBUG oslo_vmware.api [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]528a3f02-2ee5-bd67-7b77-cc9831d13a64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.981339] env[67015]: DEBUG nova.network.neutron [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Updated VIF entry in instance network info cache for port bddcd632-a286-4f4c-a565-aebaaa7dbb59. 
{{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 573.983176] env[67015]: DEBUG nova.network.neutron [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Updating instance_info_cache with network_info: [{"id": "bddcd632-a286-4f4c-a565-aebaaa7dbb59", "address": "fa:16:3e:26:42:02", "network": {"id": "7451a549-058d-44bf-acca-7bd945ac5def", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.94", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "482f98aba88c4103b6a8d7c7ab5d030d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbddcd632-a2", "ovs_interfaceid": "bddcd632-a286-4f4c-a565-aebaaa7dbb59", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 573.999847] env[67015]: DEBUG oslo_concurrency.lockutils [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] Releasing lock "refresh_cache-3eb80b23-a4a3-43e6-9620-86bf1eb344f2" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.000136] env[67015]: DEBUG nova.compute.manager [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Received event network-vif-plugged-7e886dc2-6df7-4d0e-957c-f5e018a4c239 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 574.000348] env[67015]: DEBUG oslo_concurrency.lockutils [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] Acquiring lock "1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.000555] env[67015]: DEBUG oslo_concurrency.lockutils [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] Lock "1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.000734] env[67015]: DEBUG oslo_concurrency.lockutils [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] Lock "1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 574.000907] env[67015]: DEBUG nova.compute.manager 
[req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] No waiting events found dispatching network-vif-plugged-7e886dc2-6df7-4d0e-957c-f5e018a4c239 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 574.001099] env[67015]: WARNING nova.compute.manager [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Received unexpected event network-vif-plugged-7e886dc2-6df7-4d0e-957c-f5e018a4c239 for instance with vm_state building and task_state spawning. [ 574.001276] env[67015]: DEBUG nova.compute.manager [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Received event network-changed-7e886dc2-6df7-4d0e-957c-f5e018a4c239 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 574.001434] env[67015]: DEBUG nova.compute.manager [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Refreshing instance network info cache due to event network-changed-7e886dc2-6df7-4d0e-957c-f5e018a4c239. {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 574.001618] env[67015]: DEBUG oslo_concurrency.lockutils [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] Acquiring lock "refresh_cache-1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.001778] env[67015]: DEBUG oslo_concurrency.lockutils [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] Acquired lock "refresh_cache-1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.001953] env[67015]: DEBUG nova.network.neutron [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Refreshing network info cache for port 7e886dc2-6df7-4d0e-957c-f5e018a4c239 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 574.341063] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.341938] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 574.342377] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.415012] env[67015]: DEBUG nova.network.neutron [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Updated VIF entry in instance network info cache for port 7e886dc2-6df7-4d0e-957c-f5e018a4c239. {{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 574.415412] env[67015]: DEBUG nova.network.neutron [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Updating instance_info_cache with network_info: [{"id": "7e886dc2-6df7-4d0e-957c-f5e018a4c239", "address": "fa:16:3e:78:6e:a2", "network": {"id": "86766f70-0b24-4064-8c4e-bc973f5d7d05", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-127923232-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba7a189cc3084e6db0d0a432140bf819", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ebd8af-aaf6-4d04-b869-3882e2571ed7", "external-id": "nsx-vlan-transportzone-541", "segmentation_id": 541, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e886dc2-6d", "ovs_interfaceid": "7e886dc2-6df7-4d0e-957c-f5e018a4c239", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.437557] env[67015]: DEBUG oslo_concurrency.lockutils [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] Releasing lock "refresh_cache-1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.437557] env[67015]: DEBUG nova.compute.manager [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Received event network-vif-plugged-b9e84b9e-ef3c-4953-8f4b-101468d6b95f {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 574.437557] env[67015]: DEBUG oslo_concurrency.lockutils [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] Acquiring lock "13e28171-8074-4660-91cf-f6d569414fc6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.437557] env[67015]: DEBUG oslo_concurrency.lockutils [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] Lock "13e28171-8074-4660-91cf-f6d569414fc6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.437743] env[67015]: DEBUG 
oslo_concurrency.lockutils [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] Lock "13e28171-8074-4660-91cf-f6d569414fc6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 574.437743] env[67015]: DEBUG nova.compute.manager [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] No waiting events found dispatching network-vif-plugged-b9e84b9e-ef3c-4953-8f4b-101468d6b95f {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 574.437743] env[67015]: WARNING nova.compute.manager [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Received unexpected event network-vif-plugged-b9e84b9e-ef3c-4953-8f4b-101468d6b95f for instance with vm_state building and task_state spawning. [ 574.437743] env[67015]: DEBUG nova.compute.manager [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Received event network-changed-b9e84b9e-ef3c-4953-8f4b-101468d6b95f {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 574.437851] env[67015]: DEBUG nova.compute.manager [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Refreshing instance network info cache due to event network-changed-b9e84b9e-ef3c-4953-8f4b-101468d6b95f. {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 574.437851] env[67015]: DEBUG oslo_concurrency.lockutils [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] Acquiring lock "refresh_cache-13e28171-8074-4660-91cf-f6d569414fc6" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.437851] env[67015]: DEBUG oslo_concurrency.lockutils [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] Acquired lock "refresh_cache-13e28171-8074-4660-91cf-f6d569414fc6" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.440719] env[67015]: DEBUG nova.network.neutron [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Refreshing network info cache for port b9e84b9e-ef3c-4953-8f4b-101468d6b95f {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 575.267973] env[67015]: DEBUG nova.network.neutron [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Updated VIF entry in instance network info cache for port b9e84b9e-ef3c-4953-8f4b-101468d6b95f. 
{{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 575.267973] env[67015]: DEBUG nova.network.neutron [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Updating instance_info_cache with network_info: [{"id": "b9e84b9e-ef3c-4953-8f4b-101468d6b95f", "address": "fa:16:3e:70:21:c7", "network": {"id": "7451a549-058d-44bf-acca-7bd945ac5def", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "482f98aba88c4103b6a8d7c7ab5d030d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9e84b9e-ef", "ovs_interfaceid": "b9e84b9e-ef3c-4953-8f4b-101468d6b95f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.281227] env[67015]: DEBUG oslo_concurrency.lockutils [req-f74b9766-da5c-4769-adfc-a6ea3e52763f req-c4b970fe-a524-4c04-98ad-7c7800aafe54 service nova] Releasing lock "refresh_cache-13e28171-8074-4660-91cf-f6d569414fc6" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 575.464207] env[67015]: DEBUG nova.compute.manager [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Received event network-vif-plugged-20bc7c7a-b4c5-4293-abae-bd1073a8e6b0 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 575.464207] env[67015]: DEBUG oslo_concurrency.lockutils [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] Acquiring lock "98ad3e5c-065d-4561-890c-46d5ca0a8f7f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.464207] env[67015]: DEBUG oslo_concurrency.lockutils [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] Lock "98ad3e5c-065d-4561-890c-46d5ca0a8f7f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.464207] env[67015]: DEBUG oslo_concurrency.lockutils [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] Lock "98ad3e5c-065d-4561-890c-46d5ca0a8f7f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.464593] env[67015]: DEBUG nova.compute.manager 
[req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] No waiting events found dispatching network-vif-plugged-20bc7c7a-b4c5-4293-abae-bd1073a8e6b0 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 575.464593] env[67015]: WARNING nova.compute.manager [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Received unexpected event network-vif-plugged-20bc7c7a-b4c5-4293-abae-bd1073a8e6b0 for instance with vm_state building and task_state spawning. [ 575.464761] env[67015]: DEBUG nova.compute.manager [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Received event network-vif-plugged-585fab4a-2755-4e09-9baa-d847e07dea7e {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 575.464923] env[67015]: DEBUG oslo_concurrency.lockutils [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] Acquiring lock "781b688b-ec99-4423-99b2-2502c6e8a75d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.465144] env[67015]: DEBUG oslo_concurrency.lockutils [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] Lock "781b688b-ec99-4423-99b2-2502c6e8a75d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.465353] env[67015]: DEBUG oslo_concurrency.lockutils [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] Lock "781b688b-ec99-4423-99b2-2502c6e8a75d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.465519] env[67015]: DEBUG nova.compute.manager [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] No waiting events found dispatching network-vif-plugged-585fab4a-2755-4e09-9baa-d847e07dea7e {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 575.465676] env[67015]: WARNING nova.compute.manager [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Received unexpected event network-vif-plugged-585fab4a-2755-4e09-9baa-d847e07dea7e for instance with vm_state building and task_state spawning. 
[ 575.465835] env[67015]: DEBUG nova.compute.manager [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Received event network-changed-20bc7c7a-b4c5-4293-abae-bd1073a8e6b0 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 575.466064] env[67015]: DEBUG nova.compute.manager [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Refreshing instance network info cache due to event network-changed-20bc7c7a-b4c5-4293-abae-bd1073a8e6b0. {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 575.466260] env[67015]: DEBUG oslo_concurrency.lockutils [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] Acquiring lock "refresh_cache-98ad3e5c-065d-4561-890c-46d5ca0a8f7f" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.466397] env[67015]: DEBUG oslo_concurrency.lockutils [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] Acquired lock "refresh_cache-98ad3e5c-065d-4561-890c-46d5ca0a8f7f" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.466549] env[67015]: DEBUG nova.network.neutron [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Refreshing network info cache for port 20bc7c7a-b4c5-4293-abae-bd1073a8e6b0 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 576.648430] env[67015]: DEBUG nova.network.neutron [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Updated VIF entry in instance network info cache for port 20bc7c7a-b4c5-4293-abae-bd1073a8e6b0. 
{{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 576.648875] env[67015]: DEBUG nova.network.neutron [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Updating instance_info_cache with network_info: [{"id": "20bc7c7a-b4c5-4293-abae-bd1073a8e6b0", "address": "fa:16:3e:40:20:e0", "network": {"id": "7451a549-058d-44bf-acca-7bd945ac5def", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.40", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "482f98aba88c4103b6a8d7c7ab5d030d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20bc7c7a-b4", "ovs_interfaceid": "20bc7c7a-b4c5-4293-abae-bd1073a8e6b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.664875] env[67015]: DEBUG oslo_concurrency.lockutils [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] Releasing lock "refresh_cache-98ad3e5c-065d-4561-890c-46d5ca0a8f7f" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.665149] env[67015]: DEBUG nova.compute.manager [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Received event network-changed-585fab4a-2755-4e09-9baa-d847e07dea7e {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 576.665321] env[67015]: DEBUG nova.compute.manager [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Refreshing instance network info cache due to event network-changed-585fab4a-2755-4e09-9baa-d847e07dea7e. 
{{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 576.665526] env[67015]: DEBUG oslo_concurrency.lockutils [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] Acquiring lock "refresh_cache-781b688b-ec99-4423-99b2-2502c6e8a75d" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.665672] env[67015]: DEBUG oslo_concurrency.lockutils [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] Acquired lock "refresh_cache-781b688b-ec99-4423-99b2-2502c6e8a75d" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.665843] env[67015]: DEBUG nova.network.neutron [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Refreshing network info cache for port 585fab4a-2755-4e09-9baa-d847e07dea7e {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 577.564545] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 577.564949] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 577.565169] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 577.565293] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 577.596398] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 577.596672] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 577.596918] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 577.597049] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Skipping network cache update for instance because it is Building. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 577.597219] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 577.597383] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 577.597539] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 577.597702] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 577.597884] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 577.598099] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 577.598280] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 577.598885] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 577.599282] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 577.599767] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 577.600056] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 577.600315] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 577.600552] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 577.600749] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 577.601209] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 577.616840] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.617339] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.617591] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 577.617803] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 577.620036] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a326e254-718e-41c9-88a5-1fd74b741835 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.630754] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e4c539-97eb-416e-b9f3-6145e8264eef {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.650451] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a63ef37-613e-40cd-93fc-32a9e34d7901 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.658909] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1782955b-e7f5-4fea-a82d-2d6cc0ecdde3 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.693260] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181065MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 577.693472] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.693712] 
env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.785494] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 860124dd-da7e-4beb-832f-7a9ab9580aed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 577.786205] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 96efb9a4-3c83-4dea-94f2-d93dead6a2ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 577.786205] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance bf738778-771f-4a1a-b83e-d786c67dafc0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 577.786205] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 3eb80b23-a4a3-43e6-9620-86bf1eb344f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 577.786205] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 577.786378] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 994ad7c4-4a41-49b1-98b8-efd2f2b134e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 577.786378] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance e0ac77e1-fb77-4b97-bacc-838cc3e16bbc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 577.786378] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 13e28171-8074-4660-91cf-f6d569414fc6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 577.786474] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 98ad3e5c-065d-4561-890c-46d5ca0a8f7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 577.786586] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 781b688b-ec99-4423-99b2-2502c6e8a75d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 577.819829] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 0734f630-dea5-4ee0-b890-dd50f3e8b178 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 577.850186] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 577.851033] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 577.851033] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 578.067523] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f26fc6ea-f995-481f-918c-7905fabbfe39 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.076641] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b20674d-2718-4c20-8707-3d958f435da5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.112223] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1976167-0e54-4840-a9c0-9fcfd16dc979 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.120651] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5defef3d-fffa-4392-b708-7609f38f0554 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.134955] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 578.144405] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 578.168514] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 578.168718] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.475s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
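The "Final resource view" above is internally consistent with the ten tracked allocations, each {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}. A quick check, assuming (as the numbers suggest) that the tracker counts the 512MB of reserved host memory as used:

# Values taken from the resource tracker and inventory records above.
instances = 10
alloc = {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1}
reserved_mb = 512          # MEMORY_MB 'reserved' in the inventory data
vcpu_ratio = 4.0           # VCPU 'allocation_ratio' in the inventory data

used_ram = reserved_mb + instances * alloc["MEMORY_MB"]
used_disk = instances * alloc["DISK_GB"]
used_vcpus = instances * alloc["VCPU"]
schedulable_vcpus = 48 * vcpu_ratio   # capacity placement schedules against

print(used_ram, used_disk, used_vcpus, schedulable_vcpus)
# -> 1792 10 10 192.0, matching used_ram=1792MB, used_disk=10GB, used_vcpus=10

[ 578.203654] env[67015]: DEBUG nova.network.neutron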
[req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Updated VIF entry in instance network info cache for port 585fab4a-2755-4e09-9baa-d847e07dea7e. {{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 578.204174] env[67015]: DEBUG nova.network.neutron [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Updating instance_info_cache with network_info: [{"id": "585fab4a-2755-4e09-9baa-d847e07dea7e", "address": "fa:16:3e:ea:1d:68", "network": {"id": "a883b9e1-0130-4802-81b3-5c676e3f49c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-4227508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54ce93521ed148d397c6dd8905557b34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap585fab4a-27", "ovs_interfaceid": "585fab4a-2755-4e09-9baa-d847e07dea7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.217300] env[67015]: DEBUG oslo_concurrency.lockutils [req-b14dc244-1c3b-4647-9220-1edcfd9e68de req-62c85005-378d-4ee4-8861-cec957bb79b0 service nova] Releasing lock "refresh_cache-781b688b-ec99-4423-99b2-2502c6e8a75d" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 583.409662] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Acquiring lock "66fa7689-aea7-4b88-b63c-0754f5e99d51" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.409971] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Lock "66fa7689-aea7-4b88-b63c-0754f5e99d51" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
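The network_info blob cached above is plain JSON: a list of VIF dicts, each carrying a MAC ('address'), its network with subnets, and per-subnet fixed IPs. A small sketch of pulling the addresses back out (structure copied from the entry above, heavily abridged):

import json

# Abridged copy of the cached entry above; only the fields used below.
cached = json.dumps([{
    "id": "585fab4a-2755-4e09-9baa-d847e07dea7e",
    "address": "fa:16:3e:ea:1d:68",
    "network": {"subnets": [{
        "cidr": "192.168.128.0/28",
        "ips": [{"address": "192.168.128.14", "type": "fixed"}]}]},
}])

for vif in json.loads(cached):
    ips = [ip["address"]
           for subnet in vif["network"]["subnets"]
           for ip in subnet["ips"]]
    print(vif["address"], ips)   # fa:16:3e:ea:1d:68 ['192.168.128.14']

[ 584.702977] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Acquiring lock "3cbfca3b-863a-40d1-81ab-63794b8de97e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner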
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.703492] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Lock "3cbfca3b-863a-40d1-81ab-63794b8de97e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.081998] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d4eca598-4d95-41ee-bdaa-328b7b9d08c3 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Acquiring lock "a634ad37-b9f9-40a0-b2c1-6cd8612ba274" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.082297] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d4eca598-4d95-41ee-bdaa-328b7b9d08c3 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Lock "a634ad37-b9f9-40a0-b2c1-6cd8612ba274" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.991474] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0df10f79-42ae-4c71-aee7-4a6a188fa2a1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Acquiring lock "85e5e3fd-942a-4a75-98b5-221d37a51dcf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.992070] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0df10f79-42ae-4c71-aee7-4a6a188fa2a1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Lock "85e5e3fd-942a-4a75-98b5-221d37a51dcf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.230931] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7feca8b3-8585-4934-90bc-80ef23464c76 tempest-ServerActionsTestJSON-2043308483 tempest-ServerActionsTestJSON-2043308483-project-member] Acquiring lock "b10504c5-2770-45e0-bd3a-1fdc342b1397" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.231578] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7feca8b3-8585-4934-90bc-80ef23464c76 tempest-ServerActionsTestJSON-2043308483 tempest-ServerActionsTestJSON-2043308483-project-member] Lock "b10504c5-2770-45e0-bd3a-1fdc342b1397" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 594.859957] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3f050b95-ef5d-4acf-9592-b3e87abdf3aa tempest-ServersTestJSON-1523931028 
tempest-ServersTestJSON-1523931028-project-member] Acquiring lock "4df734f8-537a-462b-991e-472c15afeb61" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.860621] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3f050b95-ef5d-4acf-9592-b3e87abdf3aa tempest-ServersTestJSON-1523931028 tempest-ServersTestJSON-1523931028-project-member] Lock "4df734f8-537a-462b-991e-472c15afeb61" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.908463] env[67015]: DEBUG oslo_concurrency.lockutils [None req-036afbd1-24b3-4e4e-86f3-c27bab5a23be tempest-FloatingIPsAssociationTestJSON-2063730022 tempest-FloatingIPsAssociationTestJSON-2063730022-project-member] Acquiring lock "29c344b0-2ec3-44e6-8d33-fd988c26da7c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.909086] env[67015]: DEBUG oslo_concurrency.lockutils [None req-036afbd1-24b3-4e4e-86f3-c27bab5a23be tempest-FloatingIPsAssociationTestJSON-2063730022 tempest-FloatingIPsAssociationTestJSON-2063730022-project-member] Lock "29c344b0-2ec3-44e6-8d33-fd988c26da7c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 597.857510] env[67015]: DEBUG oslo_concurrency.lockutils [None req-442784b2-0246-41d8-b804-081596854df1 tempest-ImagesOneServerTestJSON-1640681993 tempest-ImagesOneServerTestJSON-1640681993-project-member] Acquiring lock "8cd6a13f-fae2-43ea-846e-24f4987cf2ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.857760] env[67015]: DEBUG oslo_concurrency.lockutils [None req-442784b2-0246-41d8-b804-081596854df1 tempest-ImagesOneServerTestJSON-1640681993 tempest-ImagesOneServerTestJSON-1640681993-project-member] Lock "8cd6a13f-fae2-43ea-846e-24f4987cf2ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.875243] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7f48bfd1-1578-4c08-907c-d9d6a348ed19 tempest-InstanceActionsV221TestJSON-1617409752 tempest-InstanceActionsV221TestJSON-1617409752-project-member] Acquiring lock "ba11c5cc-bd2b-4500-a87e-941e64630c36" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.875558] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7f48bfd1-1578-4c08-907c-d9d6a348ed19 tempest-InstanceActionsV221TestJSON-1617409752 tempest-InstanceActionsV221TestJSON-1617409752-project-member] Lock "ba11c5cc-bd2b-4500-a87e-941e64630c36" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: 
waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.883613] env[67015]: DEBUG oslo_concurrency.lockutils [None req-33527b3d-474b-41e8-9204-545440cb6478 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "fd673125-b2b3-4a7b-a90d-0452b95d5db8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.883866] env[67015]: DEBUG oslo_concurrency.lockutils [None req-33527b3d-474b-41e8-9204-545440cb6478 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "fd673125-b2b3-4a7b-a90d-0452b95d5db8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.093027] env[67015]: DEBUG oslo_concurrency.lockutils [None req-8526e15b-1266-41c0-b552-5e7a15fc97dc tempest-VolumesAdminNegativeTest-1180113282 tempest-VolumesAdminNegativeTest-1180113282-project-member] Acquiring lock "76c4c961-0d94-445e-a861-b3880ef96d98" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.093848] env[67015]: DEBUG oslo_concurrency.lockutils [None req-8526e15b-1266-41c0-b552-5e7a15fc97dc tempest-VolumesAdminNegativeTest-1180113282 tempest-VolumesAdminNegativeTest-1180113282-project-member] Lock "76c4c961-0d94-445e-a861-b3880ef96d98" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.616174] env[67015]: DEBUG oslo_concurrency.lockutils [None req-82762d80-84f9-4661-a5db-09f3992bd856 tempest-SecurityGroupsTestJSON-611676208 tempest-SecurityGroupsTestJSON-611676208-project-member] Acquiring lock "32cb4672-8ebe-472b-b4ff-48bffad679e6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.616489] env[67015]: DEBUG oslo_concurrency.lockutils [None req-82762d80-84f9-4661-a5db-09f3992bd856 tempest-SecurityGroupsTestJSON-611676208 tempest-SecurityGroupsTestJSON-611676208-project-member] Lock "32cb4672-8ebe-472b-b4ff-48bffad679e6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.816139] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d0b75a1b-9828-4230-bdd4-7b46217a9b6a tempest-ServerRescueTestJSONUnderV235-1226696864 tempest-ServerRescueTestJSONUnderV235-1226696864-project-member] Acquiring lock "a35ac03a-2a8a-4c79-ae3f-5afc1f563964" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
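The Acquiring/acquired pairs above all follow the same oslo.concurrency pattern: build_and_run_instance wraps its work in a lock named after the instance UUID, so concurrent requests for the same instance serialize and log the time they "waited". A minimal sketch of that pattern (the function body and its single-call usage are illustrative):

from oslo_concurrency import lockutils

instance_uuid = "66fa7689-aea7-4b88-b63c-0754f5e99d51"

@lockutils.synchronized(instance_uuid)
def _locked_do_build_and_run_instance():
    # Only one caller may run this for the given UUID at a time; a
    # second caller blocks here and its log line reports the wait.
    pass

_locked_do_build_and_run_instance()

[ 609.816466] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d0b75a1b-9828-4230-bdd4-7b46217a9b6a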
tempest-ServerRescueTestJSONUnderV235-1226696864 tempest-ServerRescueTestJSONUnderV235-1226696864-project-member] Lock "a35ac03a-2a8a-4c79-ae3f-5afc1f563964" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.098021] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dcc5f623-064e-4169-b20d-97a88a7a6e5d tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Acquiring lock "bcef5f6b-c7b9-413c-b198-b858444a12da" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 612.098563] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dcc5f623-064e-4169-b20d-97a88a7a6e5d tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Lock "bcef5f6b-c7b9-413c-b198-b858444a12da" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.439479] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7f297844-3a5e-4aa9-96a1-c4af40267913 tempest-ServersTestBootFromVolume-375715076 tempest-ServersTestBootFromVolume-375715076-project-member] Acquiring lock "1b24ae9d-f8bc-4c11-9b07-b4a4e435e269" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.439776] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7f297844-3a5e-4aa9-96a1-c4af40267913 tempest-ServersTestBootFromVolume-375715076 tempest-ServersTestBootFromVolume-375715076-project-member] Lock "1b24ae9d-f8bc-4c11-9b07-b4a4e435e269" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.467177] env[67015]: WARNING oslo_vmware.rw_handles [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 615.467177] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 615.467177] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 615.467177] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 615.467177] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 615.467177] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 615.467177] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 615.467177] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 615.467177] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 615.467177] env[67015]: ERROR oslo_vmware.rw_handles 
raise RemoteDisconnected("Remote end closed connection without" [ 615.467177] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 615.467177] env[67015]: ERROR oslo_vmware.rw_handles [ 615.467864] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/5f6176e2-49ce-45fe-af59-a7b3e24d4275/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 615.468911] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 615.469271] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Copying Virtual Disk [datastore2] vmware_temp/5f6176e2-49ce-45fe-af59-a7b3e24d4275/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/5f6176e2-49ce-45fe-af59-a7b3e24d4275/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 615.469571] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b5278b95-ee23-4590-8520-39473994ef5d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.477012] env[67015]: DEBUG oslo_vmware.api [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Waiting for the task: (returnval){ [ 615.477012] env[67015]: value = "task-3114396" [ 615.477012] env[67015]: _type = "Task" [ 615.477012] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.485921] env[67015]: DEBUG oslo_vmware.api [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Task: {'id': task-3114396, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
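The "Waiting for the task: (returnval)..." block above is oslo.vmware's task polling: the driver invokes CopyVirtualDisk_Task, then wait_for_task() polls progress and raises the translated fault on failure. A minimal sketch of that call pattern (host, credentials, and datastore paths are placeholders; exact CopyVirtualDisk_Task parameters may vary by vCenter version):

from oslo_vmware import api

# Placeholder vCenter endpoint and credentials.
session = api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',
    api_retry_count=2, task_poll_interval=0.5)

# Invoke the disk copy through the VirtualDiskManager managed object,
# then block until the server-side task finishes or faults.
disk_mgr = session.vim.service_content.virtualDiskManager
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName='[datastore2] vmware_temp/src.vmdk',
    destName='[datastore2] vmware_temp/dst.vmdk')
session.wait_for_task(task)  # raises on faults such as 'InvalidArgument'

[ 615.988582] env[67015]: DEBUG oslo_vmware.exceptions [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Fault InvalidArgument not matched.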
{{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 615.988841] env[67015]: DEBUG oslo_concurrency.lockutils [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 615.992689] env[67015]: ERROR nova.compute.manager [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 615.992689] env[67015]: Faults: ['InvalidArgument'] [ 615.992689] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Traceback (most recent call last): [ 615.992689] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 615.992689] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] yield resources [ 615.992689] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 615.992689] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] self.driver.spawn(context, instance, image_meta, [ 615.992689] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 615.992689] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 615.992689] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 615.992689] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] self._fetch_image_if_missing(context, vi) [ 615.992689] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 615.993121] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] image_cache(vi, tmp_image_ds_loc) [ 615.993121] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 615.993121] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] vm_util.copy_virtual_disk( [ 615.993121] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 615.993121] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] session._wait_for_task(vmdk_copy_task) [ 615.993121] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 615.993121] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] return self.wait_for_task(task_ref) [ 615.993121] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 615.993121] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] return evt.wait() [ 615.993121] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 615.993121] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] result = hub.switch() [ 615.993121] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 615.993121] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] return self.greenlet.switch() [ 615.993462] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 615.993462] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] self.f(*self.args, **self.kw) [ 615.993462] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 615.993462] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] raise exceptions.translate_fault(task_info.error) [ 615.993462] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 615.993462] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Faults: ['InvalidArgument'] [ 615.993462] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] [ 615.993462] env[67015]: INFO nova.compute.manager [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Terminating instance [ 615.994806] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.994970] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 615.995234] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-116be9f0-1537-4d65-8e77-be1910cf406b {{(pid=67015) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.998801] env[67015]: DEBUG oslo_concurrency.lockutils [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Acquiring lock "refresh_cache-96efb9a4-3c83-4dea-94f2-d93dead6a2ca" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 615.998801] env[67015]: DEBUG oslo_concurrency.lockutils [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Acquired lock "refresh_cache-96efb9a4-3c83-4dea-94f2-d93dead6a2ca" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.998801] env[67015]: DEBUG nova.network.neutron [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 616.008038] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 616.008038] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 616.008038] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a33e263c-9b32-43c9-b347-3059a96175c7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.014586] env[67015]: DEBUG oslo_vmware.api [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Waiting for the task: (returnval){ [ 616.014586] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52e85321-009b-705d-c79f-24fcdd171cd7" [ 616.014586] env[67015]: _type = "Task" [ 616.014586] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.023764] env[67015]: DEBUG oslo_vmware.api [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52e85321-009b-705d-c79f-24fcdd171cd7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.042351] env[67015]: DEBUG nova.network.neutron [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Instance cache missing network info. 
{{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 616.225424] env[67015]: DEBUG nova.network.neutron [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.235679] env[67015]: DEBUG oslo_concurrency.lockutils [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Releasing lock "refresh_cache-96efb9a4-3c83-4dea-94f2-d93dead6a2ca" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 616.236298] env[67015]: DEBUG nova.compute.manager [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 616.236516] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 616.238013] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bc42d02-3516-4f9c-b955-0ccc4f8e5eed {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.250466] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 616.250736] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-06bee550-c449-42a0-a937-2c5cbd2a0bae {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.278117] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 616.278117] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 616.278117] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Deleting the datastore file [datastore2] 96efb9a4-3c83-4dea-94f2-d93dead6a2ca {{(pid=67015) 
file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 616.278117] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-01978551-b27f-43e5-942a-2bf75d33d368 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.284946] env[67015]: DEBUG oslo_vmware.api [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Waiting for the task: (returnval){ [ 616.284946] env[67015]: value = "task-3114398" [ 616.284946] env[67015]: _type = "Task" [ 616.284946] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.294553] env[67015]: DEBUG oslo_vmware.api [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Task: {'id': task-3114398, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.528022] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 616.528022] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Creating directory with path [datastore2] vmware_temp/2a91e063-884e-46e5-81fa-f2f38fad752d/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 616.528022] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a1f17782-e88d-4245-92cf-adf0fd95be9e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.539381] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Created directory with path [datastore2] vmware_temp/2a91e063-884e-46e5-81fa-f2f38fad752d/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 616.539666] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Fetch image to [datastore2] vmware_temp/2a91e063-884e-46e5-81fa-f2f38fad752d/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 616.539856] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/2a91e063-884e-46e5-81fa-f2f38fad752d/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 616.540848] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-295fc80c-c2eb-449c-9b70-b7f876d483af {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.548644] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c072bab-d9a6-4023-b451-45e61e03a3db {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.560738] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e3fd7a-49de-4ed3-9480-e35b17f7f705 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.595481] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4861d5bb-6d46-430f-9d05-46ef20e3aad1 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.601768] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e52e7b09-7058-4098-9102-bfe424eb5892 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.624966] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 616.698491] env[67015]: DEBUG oslo_vmware.rw_handles [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2a91e063-884e-46e5-81fa-f2f38fad752d/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 616.766996] env[67015]: DEBUG oslo_vmware.rw_handles [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
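The "Creating HTTP connection to write to file ..." / "Closing write handle ..." pair above is oslo.vmware's file write handle: an HTTP(S) connection to the datastore's /folder endpoint through which the image bytes are streamed; close() then reads the server's response, which is the step that failed earlier with RemoteDisconnected. A minimal sketch (host, path, and payload are placeholders; the constructor arguments reflect my reading of rw_handles and should be checked against the installed version):

from oslo_vmware import rw_handles

# Placeholder ESX host and temp path; file_size taken from the log above.
handle = rw_handles.FileWriteHandle(
    'esx.example.test', 443, 'ha-datacenter', 'datastore2',
    cookies=[], file_path='vmware_temp/demo/tmp-sparse.vmdk',
    file_size=21318656)
handle.write(b'\x00' * 512)   # the driver streams image chunks here
handle.close()                # reads the response; surfaces server errors

[ 616.767228] env[67015]: DEBUG oslo_vmware.rw_handles [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2a91e063-884e-46e5-81fa-f2f38fad752d/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2.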
{{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 616.794795] env[67015]: DEBUG oslo_vmware.api [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Task: {'id': task-3114398, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.04635} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.795077] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 616.795345] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 616.795534] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 616.795936] env[67015]: INFO nova.compute.manager [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Took 0.56 seconds to destroy the instance on the hypervisor. [ 616.796223] env[67015]: DEBUG oslo.service.loopingcall [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 616.797302] env[67015]: DEBUG nova.compute.manager [-] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Skipping network deallocation for instance since networking was not requested. 
{{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 616.798946] env[67015]: DEBUG nova.compute.claims [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 616.799061] env[67015]: DEBUG oslo_concurrency.lockutils [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.799271] env[67015]: DEBUG oslo_concurrency.lockutils [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.285364] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4eb5c18-de0b-4b0b-b4e4-165f1b2d8de6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.295546] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20a7e44f-a55d-4d64-886d-6b4f54b157c3 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.336822] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a30506a-f554-4f4a-af3e-5e0749666798 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.346225] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8def39f-39fe-4afa-8a96-89f797464503 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.359705] env[67015]: DEBUG nova.compute.provider_tree [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 617.372127] env[67015]: DEBUG nova.scheduler.client.report [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
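The claim abort above ends in the build failure re-raised below. The exception involved is oslo.vmware's VimFaultException, which carries both the fault names and the message seen in the traceback; a minimal sketch of constructing and inspecting one:

from oslo_vmware import exceptions as vexc

try:
    raise vexc.VimFaultException(
        ['InvalidArgument'],
        'A specified parameter was not correct: fileType')
except vexc.VimFaultException as e:
    # fault_list is what get_fault_class() matches against; 'InvalidArgument'
    # has no dedicated class, hence "Fault InvalidArgument not matched." above.
    print(e.fault_list, e)

[ 617.394288] env[67015]: DEBUG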
oslo_concurrency.lockutils [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.595s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 617.394693] env[67015]: ERROR nova.compute.manager [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 617.394693] env[67015]: Faults: ['InvalidArgument']
[ 617.394693] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Traceback (most recent call last):
[ 617.394693] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 617.394693] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] self.driver.spawn(context, instance, image_meta,
[ 617.394693] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 617.394693] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 617.394693] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 617.394693] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] self._fetch_image_if_missing(context, vi)
[ 617.394693] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 617.394693] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] image_cache(vi, tmp_image_ds_loc)
[ 617.394693] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 617.395371] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] vm_util.copy_virtual_disk(
[ 617.395371] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 617.395371] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] session._wait_for_task(vmdk_copy_task)
[ 617.395371] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 617.395371] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] return self.wait_for_task(task_ref)
[ 617.395371] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 617.395371] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] return evt.wait()
[ 617.395371] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 617.395371] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] result = hub.switch()
[ 617.395371] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 617.395371] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] return self.greenlet.switch()
[ 617.395371] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 617.395371] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] self.f(*self.args, **self.kw)
[ 617.395827] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 617.395827] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] raise exceptions.translate_fault(task_info.error)
[ 617.395827] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 617.395827] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Faults: ['InvalidArgument']
[ 617.395827] env[67015]: ERROR nova.compute.manager [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca]
[ 617.395827] env[67015]: DEBUG nova.compute.utils [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 617.399953] env[67015]: DEBUG nova.compute.manager [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Build of instance 96efb9a4-3c83-4dea-94f2-d93dead6a2ca was re-scheduled: A specified parameter was not correct: fileType
[ 617.399953] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 617.401166] env[67015]: DEBUG nova.compute.manager [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 617.401166] env[67015]: DEBUG oslo_concurrency.lockutils [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Acquiring lock "refresh_cache-96efb9a4-3c83-4dea-94f2-d93dead6a2ca" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 617.401166] env[67015]: DEBUG oslo_concurrency.lockutils [None
req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Acquired lock "refresh_cache-96efb9a4-3c83-4dea-94f2-d93dead6a2ca" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.401306] env[67015]: DEBUG nova.network.neutron [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 617.445969] env[67015]: DEBUG nova.network.neutron [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 617.589775] env[67015]: DEBUG nova.network.neutron [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.602373] env[67015]: DEBUG oslo_concurrency.lockutils [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Releasing lock "refresh_cache-96efb9a4-3c83-4dea-94f2-d93dead6a2ca" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 617.602679] env[67015]: DEBUG nova.compute.manager [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 617.602884] env[67015]: DEBUG nova.compute.manager [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] Skipping network deallocation for instance since networking was not requested. 
{{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 617.723116] env[67015]: INFO nova.scheduler.client.report [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Deleted allocations for instance 96efb9a4-3c83-4dea-94f2-d93dead6a2ca [ 617.769160] env[67015]: DEBUG oslo_concurrency.lockutils [None req-511aec11-0ce2-4942-9116-59103608ce6f tempest-ServerDiagnosticsV248Test-68294157 tempest-ServerDiagnosticsV248Test-68294157-project-member] Lock "96efb9a4-3c83-4dea-94f2-d93dead6a2ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.946s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.770700] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "96efb9a4-3c83-4dea-94f2-d93dead6a2ca" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 46.963s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.770893] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 96efb9a4-3c83-4dea-94f2-d93dead6a2ca] During sync_power_state the instance has a pending task (spawning). Skip. [ 617.772029] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "96efb9a4-3c83-4dea-94f2-d93dead6a2ca" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.791015] env[67015]: DEBUG nova.compute.manager [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Starting instance... 
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 617.850773] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.851041] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.853315] env[67015]: INFO nova.compute.claims [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 618.365132] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8c735c-3738-4814-a0cc-bab311740c05 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.373793] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a25e8f-a68b-4414-b715-188c117833ff {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.418027] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c348ca14-09f7-4dc9-bff2-371d5cd125f8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.427085] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c041dfb6-dde5-45a7-9098-4cf4cd383ffc {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.441839] env[67015]: DEBUG nova.compute.provider_tree [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 618.455351] env[67015]: DEBUG nova.scheduler.client.report [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 618.481962] env[67015]: DEBUG oslo_concurrency.lockutils [None 
req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.631s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 618.483161] env[67015]: DEBUG nova.compute.manager [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 618.543985] env[67015]: DEBUG nova.compute.utils [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 618.548885] env[67015]: DEBUG nova.compute.manager [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 618.549085] env[67015]: DEBUG nova.network.neutron [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 618.560781] env[67015]: DEBUG nova.compute.manager [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 618.659422] env[67015]: DEBUG nova.compute.manager [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Start spawning the instance on the hypervisor. 
{{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 618.675692] env[67015]: DEBUG nova.policy [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f1f741dce8b46e986464e73d4d7e67e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3b7d96d44ce04518b431256e76ae123c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 618.693204] env[67015]: DEBUG nova.virt.hardware [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 618.693204] env[67015]: DEBUG nova.virt.hardware [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 618.693499] env[67015]: DEBUG nova.virt.hardware [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 618.693539] env[67015]: DEBUG nova.virt.hardware [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 618.693656] env[67015]: DEBUG nova.virt.hardware [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 618.693800] env[67015]: DEBUG nova.virt.hardware [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 618.694018] env[67015]: DEBUG 
nova.virt.hardware [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 618.694178] env[67015]: DEBUG nova.virt.hardware [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 618.694340] env[67015]: DEBUG nova.virt.hardware [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 618.694497] env[67015]: DEBUG nova.virt.hardware [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 618.694688] env[67015]: DEBUG nova.virt.hardware [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 618.695971] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55373495-ed96-4fe6-be10-271e3f6c04b3 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.706388] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb65438-31d3-41cd-9da5-5b77a1f29815 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.738874] env[67015]: DEBUG nova.network.neutron [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Successfully created port: 2c4ec903-b728-4253-b181-6a324dc5c4eb {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 620.891805] env[67015]: DEBUG nova.network.neutron [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Successfully updated port: 2c4ec903-b728-4253-b181-6a324dc5c4eb {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 620.907139] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Acquiring lock "refresh_cache-0734f630-dea5-4ee0-b890-dd50f3e8b178" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 620.907297] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 
tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Acquired lock "refresh_cache-0734f630-dea5-4ee0-b890-dd50f3e8b178" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.907432] env[67015]: DEBUG nova.network.neutron [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 620.992562] env[67015]: DEBUG nova.network.neutron [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 621.516675] env[67015]: DEBUG nova.network.neutron [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Updating instance_info_cache with network_info: [{"id": "2c4ec903-b728-4253-b181-6a324dc5c4eb", "address": "fa:16:3e:a1:79:98", "network": {"id": "7451a549-058d-44bf-acca-7bd945ac5def", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.190", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "482f98aba88c4103b6a8d7c7ab5d030d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c4ec903-b7", "ovs_interfaceid": "2c4ec903-b728-4253-b181-6a324dc5c4eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.530727] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Releasing lock "refresh_cache-0734f630-dea5-4ee0-b890-dd50f3e8b178" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 621.532049] env[67015]: DEBUG nova.compute.manager [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Instance network_info: |[{"id": "2c4ec903-b728-4253-b181-6a324dc5c4eb", "address": "fa:16:3e:a1:79:98", "network": {"id": "7451a549-058d-44bf-acca-7bd945ac5def", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.233.190", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "482f98aba88c4103b6a8d7c7ab5d030d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c4ec903-b7", "ovs_interfaceid": "2c4ec903-b728-4253-b181-6a324dc5c4eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 621.532501] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:79:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f0ef5aba-bd9a-42ff-a1a0-5e763986d70a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2c4ec903-b728-4253-b181-6a324dc5c4eb', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 621.539924] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Creating folder: Project (3b7d96d44ce04518b431256e76ae123c). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 621.542283] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bcbe537d-a41c-4d11-909d-9a849582d291 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.553781] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Created folder: Project (3b7d96d44ce04518b431256e76ae123c) in parent group-v623108. [ 621.554175] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Creating folder: Instances. Parent ref: group-v623143. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 621.554803] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c6492c4c-c633-4a54-aa50-dc78c505200e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.564773] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Created folder: Instances in parent group-v623143. 
[ 621.564773] env[67015]: DEBUG oslo.service.loopingcall [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 621.564773] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 621.564773] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f2ffb0e-95f6-4796-b2ac-d9f2cb714c39 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.586085] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 621.586085] env[67015]: value = "task-3114401" [ 621.586085] env[67015]: _type = "Task" [ 621.586085] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.595470] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114401, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.652378] env[67015]: DEBUG nova.compute.manager [req-b579acac-24d7-4010-92d2-3ee1c5b0ccdf req-f575fbac-2d83-4e4c-b95a-12cfe16c6354 service nova] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Received event network-vif-plugged-2c4ec903-b728-4253-b181-6a324dc5c4eb {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 621.652378] env[67015]: DEBUG oslo_concurrency.lockutils [req-b579acac-24d7-4010-92d2-3ee1c5b0ccdf req-f575fbac-2d83-4e4c-b95a-12cfe16c6354 service nova] Acquiring lock "0734f630-dea5-4ee0-b890-dd50f3e8b178-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.652378] env[67015]: DEBUG oslo_concurrency.lockutils [req-b579acac-24d7-4010-92d2-3ee1c5b0ccdf req-f575fbac-2d83-4e4c-b95a-12cfe16c6354 service nova] Lock "0734f630-dea5-4ee0-b890-dd50f3e8b178-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.652378] env[67015]: DEBUG oslo_concurrency.lockutils [req-b579acac-24d7-4010-92d2-3ee1c5b0ccdf req-f575fbac-2d83-4e4c-b95a-12cfe16c6354 service nova] Lock "0734f630-dea5-4ee0-b890-dd50f3e8b178-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 621.652544] env[67015]: DEBUG nova.compute.manager [req-b579acac-24d7-4010-92d2-3ee1c5b0ccdf req-f575fbac-2d83-4e4c-b95a-12cfe16c6354 service nova] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] No waiting events found dispatching network-vif-plugged-2c4ec903-b728-4253-b181-6a324dc5c4eb {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 621.652544] env[67015]: WARNING nova.compute.manager [req-b579acac-24d7-4010-92d2-3ee1c5b0ccdf req-f575fbac-2d83-4e4c-b95a-12cfe16c6354 service nova] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Received 
unexpected event network-vif-plugged-2c4ec903-b728-4253-b181-6a324dc5c4eb for instance with vm_state building and task_state spawning. [ 622.100341] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114401, 'name': CreateVM_Task, 'duration_secs': 0.283524} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.103018] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 622.103018] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.103018] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.103018] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 622.103018] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-508850f3-3514-4a94-a687-e145adba16ee {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.110259] env[67015]: DEBUG oslo_vmware.api [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Waiting for the task: (returnval){ [ 622.110259] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52d9651f-ed7b-ca28-eecc-c8df2544ce9a" [ 622.110259] env[67015]: _type = "Task" [ 622.110259] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.123023] env[67015]: DEBUG oslo_vmware.api [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52d9651f-ed7b-ca28-eecc-c8df2544ce9a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.622697] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 622.622992] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 622.623215] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 624.859446] env[67015]: DEBUG oslo_concurrency.lockutils [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Acquiring lock "5c77964f-e902-489a-86c3-9c9d4dd304d3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.859446] env[67015]: DEBUG oslo_concurrency.lockutils [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Lock "5c77964f-e902-489a-86c3-9c9d4dd304d3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.932885] env[67015]: DEBUG nova.compute.manager [req-35f45069-3400-4ca0-865e-d720a790ed3e req-5c3c16e4-c069-4938-af9a-b94ff6940f9f service nova] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Received event network-changed-2c4ec903-b728-4253-b181-6a324dc5c4eb {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 624.932885] env[67015]: DEBUG nova.compute.manager [req-35f45069-3400-4ca0-865e-d720a790ed3e req-5c3c16e4-c069-4938-af9a-b94ff6940f9f service nova] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Refreshing instance network info cache due to event network-changed-2c4ec903-b728-4253-b181-6a324dc5c4eb. 
{{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 624.932885] env[67015]: DEBUG oslo_concurrency.lockutils [req-35f45069-3400-4ca0-865e-d720a790ed3e req-5c3c16e4-c069-4938-af9a-b94ff6940f9f service nova] Acquiring lock "refresh_cache-0734f630-dea5-4ee0-b890-dd50f3e8b178" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 624.932885] env[67015]: DEBUG oslo_concurrency.lockutils [req-35f45069-3400-4ca0-865e-d720a790ed3e req-5c3c16e4-c069-4938-af9a-b94ff6940f9f service nova] Acquired lock "refresh_cache-0734f630-dea5-4ee0-b890-dd50f3e8b178" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.932885] env[67015]: DEBUG nova.network.neutron [req-35f45069-3400-4ca0-865e-d720a790ed3e req-5c3c16e4-c069-4938-af9a-b94ff6940f9f service nova] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Refreshing network info cache for port 2c4ec903-b728-4253-b181-6a324dc5c4eb {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 625.604599] env[67015]: DEBUG nova.network.neutron [req-35f45069-3400-4ca0-865e-d720a790ed3e req-5c3c16e4-c069-4938-af9a-b94ff6940f9f service nova] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Updated VIF entry in instance network info cache for port 2c4ec903-b728-4253-b181-6a324dc5c4eb. {{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 625.605768] env[67015]: DEBUG nova.network.neutron [req-35f45069-3400-4ca0-865e-d720a790ed3e req-5c3c16e4-c069-4938-af9a-b94ff6940f9f service nova] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Updating instance_info_cache with network_info: [{"id": "2c4ec903-b728-4253-b181-6a324dc5c4eb", "address": "fa:16:3e:a1:79:98", "network": {"id": "7451a549-058d-44bf-acca-7bd945ac5def", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.190", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "482f98aba88c4103b6a8d7c7ab5d030d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c4ec903-b7", "ovs_interfaceid": "2c4ec903-b728-4253-b181-6a324dc5c4eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.617838] env[67015]: DEBUG oslo_concurrency.lockutils [req-35f45069-3400-4ca0-865e-d720a790ed3e req-5c3c16e4-c069-4938-af9a-b94ff6940f9f service nova] Releasing lock "refresh_cache-0734f630-dea5-4ee0-b890-dd50f3e8b178" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 629.307390] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b22649bf-5559-4f9b-b2c7-87fdc23df00c tempest-ServersTestMultiNic-1356030888 tempest-ServersTestMultiNic-1356030888-project-member] Acquiring lock "76b9817f-5571-48f6-8144-08d18f635750" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.307747] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b22649bf-5559-4f9b-b2c7-87fdc23df00c tempest-ServersTestMultiNic-1356030888 tempest-ServersTestMultiNic-1356030888-project-member] Lock "76b9817f-5571-48f6-8144-08d18f635750" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 632.308705] env[67015]: DEBUG oslo_concurrency.lockutils [None req-176e4e6b-9e45-4e99-8f88-a71abf16d2f0 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Acquiring lock "e0eca599-b9a0-40a5-968d-21ac240f815c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 632.308970] env[67015]: DEBUG oslo_concurrency.lockutils [None req-176e4e6b-9e45-4e99-8f88-a71abf16d2f0 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Lock "e0eca599-b9a0-40a5-968d-21ac240f815c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.114090] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 638.154502] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 638.513536] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 638.514249] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 638.514249] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 638.514249] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 638.545026] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Skipping network cache update for instance because it is Building. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 638.545309] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 638.545809] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 638.545809] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 638.546036] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 638.546296] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 638.547862] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 638.547862] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 638.547862] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 638.547862] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 638.547862] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 638.548120] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 638.548120] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 639.513580] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 639.513860] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 639.513987] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 639.514176] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 639.514332] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 639.530839] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.531082] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.531897] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.531897] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 639.533289] env[67015]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e137544-0766-4914-b641-5eb2119930e4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.544032] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbbc715c-d7e2-4570-bc46-66081074aed2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.564543] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2521b02b-f2c5-4129-a137-4cf96fd7279b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.573483] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-840ace5b-09d8-4f53-a2cc-a09a51179c90 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.607680] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181030MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 639.608181] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.608181] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.699883] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 860124dd-da7e-4beb-832f-7a9ab9580aed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 639.704245] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance bf738778-771f-4a1a-b83e-d786c67dafc0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 639.704245] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 3eb80b23-a4a3-43e6-9620-86bf1eb344f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 639.704245] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 639.704245] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 994ad7c4-4a41-49b1-98b8-efd2f2b134e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 639.704445] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance e0ac77e1-fb77-4b97-bacc-838cc3e16bbc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 639.704445] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 13e28171-8074-4660-91cf-f6d569414fc6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 639.704445] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 98ad3e5c-065d-4561-890c-46d5ca0a8f7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 639.704445] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 781b688b-ec99-4423-99b2-2502c6e8a75d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 639.704671] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 0734f630-dea5-4ee0-b890-dd50f3e8b178 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 639.719357] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 639.741821] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 66fa7689-aea7-4b88-b63c-0754f5e99d51 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 639.754476] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 3cbfca3b-863a-40d1-81ab-63794b8de97e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 639.768712] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance a634ad37-b9f9-40a0-b2c1-6cd8612ba274 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 639.780911] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 85e5e3fd-942a-4a75-98b5-221d37a51dcf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 639.801407] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance b10504c5-2770-45e0-bd3a-1fdc342b1397 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 639.813114] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 4df734f8-537a-462b-991e-472c15afeb61 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 639.831556] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 29c344b0-2ec3-44e6-8d33-fd988c26da7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 639.847276] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8cd6a13f-fae2-43ea-846e-24f4987cf2ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 639.869735] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance ba11c5cc-bd2b-4500-a87e-941e64630c36 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 639.887322] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance fd673125-b2b3-4a7b-a90d-0452b95d5db8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 639.902031] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 76c4c961-0d94-445e-a861-b3880ef96d98 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 639.919574] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 32cb4672-8ebe-472b-b4ff-48bffad679e6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 639.933647] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance a35ac03a-2a8a-4c79-ae3f-5afc1f563964 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 639.947159] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance bcef5f6b-c7b9-413c-b198-b858444a12da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 639.966303] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 1b24ae9d-f8bc-4c11-9b07-b4a4e435e269 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 639.980801] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 5c77964f-e902-489a-86c3-9c9d4dd304d3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 639.999075] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 76b9817f-5571-48f6-8144-08d18f635750 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 640.011041] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance e0eca599-b9a0-40a5-968d-21ac240f815c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 640.011041] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 640.011041] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 640.524984] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5055de8-eed2-4867-ba3b-e5dbfce5bdfc {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.533671] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1460eb17-0f2d-47f4-9b16-d3c2c6e0524c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.568583] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e28a1014-81bd-4212-a214-e08de1cfee44 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.581790] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a7192a-2dee-4ac0-ac54-5cc1e270fd2b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.596596] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 640.610105] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 640.634765] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 640.636027] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.027s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 643.757298] env[67015]: DEBUG oslo_concurrency.lockutils [None 
req-5d4bbda1-8893-4b76-b584-179ff7412748 tempest-AttachVolumeNegativeTest-173917919 tempest-AttachVolumeNegativeTest-173917919-project-member] Acquiring lock "573d1e82-a96d-47e9-89b0-efd69306ed59" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 643.757607] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5d4bbda1-8893-4b76-b584-179ff7412748 tempest-AttachVolumeNegativeTest-173917919 tempest-AttachVolumeNegativeTest-173917919-project-member] Lock "573d1e82-a96d-47e9-89b0-efd69306ed59" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 662.680358] env[67015]: DEBUG oslo_concurrency.lockutils [None req-bf7c5f91-7852-4a16-a8b4-1430bd404801 tempest-ServerActionsV293TestJSON-982079223 tempest-ServerActionsV293TestJSON-982079223-project-member] Acquiring lock "63420a94-ef64-407a-a032-61a619907b56" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.680669] env[67015]: DEBUG oslo_concurrency.lockutils [None req-bf7c5f91-7852-4a16-a8b4-1430bd404801 tempest-ServerActionsV293TestJSON-982079223 tempest-ServerActionsV293TestJSON-982079223-project-member] Lock "63420a94-ef64-407a-a032-61a619907b56" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 664.187141] env[67015]: WARNING oslo_vmware.rw_handles [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 664.187141] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 664.187141] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 664.187141] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 664.187141] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 664.187141] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 664.187141] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 664.187141] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 664.187141] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 664.187141] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 664.187141] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 664.187141] env[67015]: ERROR oslo_vmware.rw_handles [ 664.187826] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance:
994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/2a91e063-884e-46e5-81fa-f2f38fad752d/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 664.189255] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 664.189523] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Copying Virtual Disk [datastore2] vmware_temp/2a91e063-884e-46e5-81fa-f2f38fad752d/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/2a91e063-884e-46e5-81fa-f2f38fad752d/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 664.189848] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9dd6f680-82db-4a64-94e8-6edebf594412 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.199057] env[67015]: DEBUG oslo_vmware.api [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Waiting for the task: (returnval){ [ 664.199057] env[67015]: value = "task-3114412" [ 664.199057] env[67015]: _type = "Task" [ 664.199057] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.207635] env[67015]: DEBUG oslo_vmware.api [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Task: {'id': task-3114412, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.712776] env[67015]: DEBUG oslo_vmware.exceptions [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Fault InvalidArgument not matched. 
{{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 664.712776] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.712776] env[67015]: ERROR nova.compute.manager [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 664.712776] env[67015]: Faults: ['InvalidArgument'] [ 664.712776] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Traceback (most recent call last): [ 664.712776] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 664.712776] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] yield resources [ 664.712776] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 664.712776] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] self.driver.spawn(context, instance, image_meta, [ 664.713249] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 664.713249] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 664.713249] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 664.713249] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] self._fetch_image_if_missing(context, vi) [ 664.713249] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 664.713249] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] image_cache(vi, tmp_image_ds_loc) [ 664.713249] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 664.713249] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] vm_util.copy_virtual_disk( [ 664.713249] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 664.713249] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] session._wait_for_task(vmdk_copy_task) [ 664.713249] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 664.713249] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] return self.wait_for_task(task_ref) [ 664.713249] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 664.713534] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] return evt.wait() [ 664.713534] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 664.713534] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] result = hub.switch() [ 664.713534] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 664.713534] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] return self.greenlet.switch() [ 664.713534] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 664.713534] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] self.f(*self.args, **self.kw) [ 664.713534] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 664.713534] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] raise exceptions.translate_fault(task_info.error) [ 664.713534] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 664.713534] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Faults: ['InvalidArgument'] [ 664.713534] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] [ 664.713781] env[67015]: INFO nova.compute.manager [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Terminating instance [ 664.714289] env[67015]: DEBUG oslo_concurrency.lockutils [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.714408] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 664.714662] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aa6f25a4-4469-4e8f-ae57-19ce35ccbf25 {{(pid=67015) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.716816] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Acquiring lock "refresh_cache-994ad7c4-4a41-49b1-98b8-efd2f2b134e7" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.716981] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Acquired lock "refresh_cache-994ad7c4-4a41-49b1-98b8-efd2f2b134e7" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.717168] env[67015]: DEBUG nova.network.neutron [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 664.725532] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 664.725899] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 664.727139] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9596896-5d42-46a0-b85d-35cbd251ee06 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.736598] env[67015]: DEBUG oslo_vmware.api [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Waiting for the task: (returnval){ [ 664.736598] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]5265f90a-1b79-55f6-102e-51730a7a1454" [ 664.736598] env[67015]: _type = "Task" [ 664.736598] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.745380] env[67015]: DEBUG oslo_vmware.api [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]5265f90a-1b79-55f6-102e-51730a7a1454, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.749963] env[67015]: DEBUG nova.network.neutron [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Instance cache missing network info. 
{{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 664.902812] env[67015]: DEBUG nova.network.neutron [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.913400] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Releasing lock "refresh_cache-994ad7c4-4a41-49b1-98b8-efd2f2b134e7" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.914029] env[67015]: DEBUG nova.compute.manager [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 664.914029] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 664.915051] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0195def-29ae-4aa5-9805-fa768ebcfb9a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.923705] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 664.923947] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-235310c1-0900-4085-84cf-b520d4e79fb6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.964789] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 664.964969] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 664.965150] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Deleting the datastore file [datastore2] 994ad7c4-4a41-49b1-98b8-efd2f2b134e7 {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 664.965513] env[67015]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b439f88a-0dfe-4992-84f4-005c65055d2e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.973535] env[67015]: DEBUG oslo_vmware.api [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Waiting for the task: (returnval){ [ 664.973535] env[67015]: value = "task-3114414" [ 664.973535] env[67015]: _type = "Task" [ 664.973535] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.982417] env[67015]: DEBUG oslo_vmware.api [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Task: {'id': task-3114414, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.247157] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 665.247447] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Creating directory with path [datastore2] vmware_temp/58dc533a-c740-490c-83d8-6d986511733f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 665.247687] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bdaa3966-c3a5-43d3-9c47-d19411e3454d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.259137] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Created directory with path [datastore2] vmware_temp/58dc533a-c740-490c-83d8-6d986511733f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 665.259502] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Fetch image to [datastore2] vmware_temp/58dc533a-c740-490c-83d8-6d986511733f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 665.259742] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/58dc533a-c740-490c-83d8-6d986511733f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 665.260544] env[67015]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c15aef7-5d8b-4931-818c-056f22968808 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.268031] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c9b02fa-0235-4820-98ca-44e949d17f69 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.278168] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a51cb298-b9d9-4cfe-a7d0-79e1459c9cfd {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.310595] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-143366cb-80de-4d89-bc57-e5c752332e87 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.317979] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-702740e6-f559-4111-a2a2-db6b28c901ec {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.341571] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 665.395654] env[67015]: DEBUG oslo_vmware.rw_handles [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/58dc533a-c740-490c-83d8-6d986511733f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 665.457657] env[67015]: DEBUG oslo_vmware.rw_handles [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 665.457657] env[67015]: DEBUG oslo_vmware.rw_handles [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/58dc533a-c740-490c-83d8-6d986511733f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 665.483618] env[67015]: DEBUG oslo_vmware.api [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Task: {'id': task-3114414, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.040894} completed successfully. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.483907] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 665.484150] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 665.484336] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 665.484513] env[67015]: INFO nova.compute.manager [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Took 0.57 seconds to destroy the instance on the hypervisor. [ 665.484747] env[67015]: DEBUG oslo.service.loopingcall [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 665.484955] env[67015]: DEBUG nova.compute.manager [-] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Skipping network deallocation for instance since networking was not requested.
{{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 665.487111] env[67015]: DEBUG nova.compute.claims [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 665.487284] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.487494] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.894841] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f5c374-271c-4d23-b673-54fec205a623 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.902721] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0215fb6d-8626-4a32-95f5-44b5739f3576 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.932774] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b382b80-d56d-477c-ba5f-81496a8df570 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.940316] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7923dfe-c785-4239-9e7d-e24501bf73fd {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.954326] env[67015]: DEBUG nova.compute.provider_tree [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 665.963676] env[67015]: DEBUG nova.scheduler.client.report [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 665.981197] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 
tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.494s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 665.981728] env[67015]: ERROR nova.compute.manager [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 665.981728] env[67015]: Faults: ['InvalidArgument'] [ 665.981728] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Traceback (most recent call last): [ 665.981728] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 665.981728] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] self.driver.spawn(context, instance, image_meta, [ 665.981728] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 665.981728] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 665.981728] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 665.981728] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] self._fetch_image_if_missing(context, vi) [ 665.981728] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 665.981728] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] image_cache(vi, tmp_image_ds_loc) [ 665.981728] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 665.982011] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] vm_util.copy_virtual_disk( [ 665.982011] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 665.982011] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] session._wait_for_task(vmdk_copy_task) [ 665.982011] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 665.982011] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] return self.wait_for_task(task_ref) [ 665.982011] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 665.982011] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] return evt.wait() [ 665.982011] env[67015]: ERROR nova.compute.manager 
[instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 665.982011] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] result = hub.switch() [ 665.982011] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 665.982011] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] return self.greenlet.switch() [ 665.982011] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 665.982011] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] self.f(*self.args, **self.kw) [ 665.982590] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 665.982590] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] raise exceptions.translate_fault(task_info.error) [ 665.982590] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 665.982590] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Faults: ['InvalidArgument'] [ 665.982590] env[67015]: ERROR nova.compute.manager [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] [ 665.982590] env[67015]: DEBUG nova.compute.utils [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 665.984178] env[67015]: DEBUG nova.compute.manager [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Build of instance 994ad7c4-4a41-49b1-98b8-efd2f2b134e7 was re-scheduled: A specified parameter was not correct: fileType [ 665.984178] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 665.984557] env[67015]: DEBUG nova.compute.manager [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 665.984785] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Acquiring lock "refresh_cache-994ad7c4-4a41-49b1-98b8-efd2f2b134e7" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 665.984931] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Acquired lock 
"refresh_cache-994ad7c4-4a41-49b1-98b8-efd2f2b134e7" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.985105] env[67015]: DEBUG nova.network.neutron [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 666.033376] env[67015]: DEBUG nova.network.neutron [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 666.166049] env[67015]: DEBUG nova.network.neutron [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.193459] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Releasing lock "refresh_cache-994ad7c4-4a41-49b1-98b8-efd2f2b134e7" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 666.193459] env[67015]: DEBUG nova.compute.manager [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 666.193459] env[67015]: DEBUG nova.compute.manager [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Skipping network deallocation for instance since networking was not requested. 
[ 666.193459] env[67015]: DEBUG nova.compute.manager [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] Skipping network deallocation for instance since networking was not requested. {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}}
[ 666.284282] env[67015]: INFO nova.scheduler.client.report [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Deleted allocations for instance 994ad7c4-4a41-49b1-98b8-efd2f2b134e7
[ 666.302461] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea397835-aae6-4bd0-a5fe-028678eca6c7 tempest-ServersAdmin275Test-981334059 tempest-ServersAdmin275Test-981334059-project-member] Lock "994ad7c4-4a41-49b1-98b8-efd2f2b134e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.177s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 666.303975] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "994ad7c4-4a41-49b1-98b8-efd2f2b134e7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 95.496s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 666.304191] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 994ad7c4-4a41-49b1-98b8-efd2f2b134e7] During sync_power_state the instance has a pending task (spawning). Skip.
[ 666.304374] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "994ad7c4-4a41-49b1-98b8-efd2f2b134e7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 666.321041] env[67015]: DEBUG nova.compute.manager [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Starting instance...
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 666.369974] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 666.370366] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.371905] env[67015]: INFO nova.compute.claims [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 666.784157] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab31b672-892a-420a-94ee-32a97529cdf0 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.791900] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-001cf13b-a7c2-4e98-9ffa-2e85aa7eb549 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.824694] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ca67e6-be50-4879-859d-9b8338665d4d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.832464] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c00c4642-4b38-4186-ad63-147ce3c1d887 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.846486] env[67015]: DEBUG nova.compute.provider_tree [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 666.856233] env[67015]: DEBUG nova.scheduler.client.report [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 666.871439] env[67015]: DEBUG oslo_concurrency.lockutils [None 
req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.501s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.872089] env[67015]: DEBUG nova.compute.manager [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 666.908044] env[67015]: DEBUG nova.compute.utils [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 666.908961] env[67015]: DEBUG nova.compute.manager [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 666.909263] env[67015]: DEBUG nova.network.neutron [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 666.916669] env[67015]: DEBUG nova.compute.manager [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 666.981030] env[67015]: DEBUG nova.compute.manager [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Start spawning the instance on the hypervisor. 
{{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 667.007274] env[67015]: DEBUG nova.virt.hardware [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 667.007274] env[67015]: DEBUG nova.virt.hardware [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 667.007274] env[67015]: DEBUG nova.virt.hardware [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 667.007401] env[67015]: DEBUG nova.virt.hardware [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 667.007401] env[67015]: DEBUG nova.virt.hardware [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 667.007401] env[67015]: DEBUG nova.virt.hardware [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 667.008024] env[67015]: DEBUG nova.virt.hardware [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 667.008024] env[67015]: DEBUG nova.virt.hardware [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 667.008024] env[67015]: DEBUG nova.virt.hardware [None 
req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 667.008161] env[67015]: DEBUG nova.virt.hardware [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 667.008268] env[67015]: DEBUG nova.virt.hardware [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 667.009136] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37aea08c-95ff-4189-92ca-187a2652e0df {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.013330] env[67015]: DEBUG nova.policy [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ccfeb618e7c1470fa20f491d3c78817a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '699f49af0394495bb00c4a455a1e744d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 667.019815] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff64a38-bf52-424f-9ba4-acee4210d0ca {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.594180] env[67015]: DEBUG nova.network.neutron [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Successfully created port: 70e2cfb9-1aee-4a00-bb4c-c5aac7ea5940 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 668.860176] env[67015]: DEBUG nova.network.neutron [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Successfully updated port: 70e2cfb9-1aee-4a00-bb4c-c5aac7ea5940 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 668.878129] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Acquiring lock "refresh_cache-aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 668.878301] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 
tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Acquired lock "refresh_cache-aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.878457] env[67015]: DEBUG nova.network.neutron [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 668.973985] env[67015]: DEBUG nova.network.neutron [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 669.304662] env[67015]: DEBUG nova.network.neutron [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Updating instance_info_cache with network_info: [{"id": "70e2cfb9-1aee-4a00-bb4c-c5aac7ea5940", "address": "fa:16:3e:a9:e2:6e", "network": {"id": "2a90aa65-8166-481b-8708-d819bc48495e", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-869756270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "699f49af0394495bb00c4a455a1e744d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ef746c57-cd18-4883-a0e9-c52937aaf41d", "external-id": "nsx-vlan-transportzone-863", "segmentation_id": 863, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70e2cfb9-1a", "ovs_interfaceid": "70e2cfb9-1aee-4a00-bb4c-c5aac7ea5940", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.325978] env[67015]: DEBUG nova.compute.manager [req-7901b646-8174-4086-9e84-1e9c6860a1d5 req-6c0f602e-1b94-4ac9-b00f-21a39174ea42 service nova] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Received event network-vif-plugged-70e2cfb9-1aee-4a00-bb4c-c5aac7ea5940 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 669.326541] env[67015]: DEBUG oslo_concurrency.lockutils [req-7901b646-8174-4086-9e84-1e9c6860a1d5 req-6c0f602e-1b94-4ac9-b00f-21a39174ea42 service nova] Acquiring lock "aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.326932] env[67015]: DEBUG oslo_concurrency.lockutils [req-7901b646-8174-4086-9e84-1e9c6860a1d5 req-6c0f602e-1b94-4ac9-b00f-21a39174ea42 service nova] Lock "aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a-events" 
acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.327155] env[67015]: DEBUG oslo_concurrency.lockutils [req-7901b646-8174-4086-9e84-1e9c6860a1d5 req-6c0f602e-1b94-4ac9-b00f-21a39174ea42 service nova] Lock "aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 669.327335] env[67015]: DEBUG nova.compute.manager [req-7901b646-8174-4086-9e84-1e9c6860a1d5 req-6c0f602e-1b94-4ac9-b00f-21a39174ea42 service nova] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] No waiting events found dispatching network-vif-plugged-70e2cfb9-1aee-4a00-bb4c-c5aac7ea5940 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 669.327505] env[67015]: WARNING nova.compute.manager [req-7901b646-8174-4086-9e84-1e9c6860a1d5 req-6c0f602e-1b94-4ac9-b00f-21a39174ea42 service nova] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Received unexpected event network-vif-plugged-70e2cfb9-1aee-4a00-bb4c-c5aac7ea5940 for instance with vm_state building and task_state spawning. [ 669.329086] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Releasing lock "refresh_cache-aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 669.329474] env[67015]: DEBUG nova.compute.manager [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Instance network_info: |[{"id": "70e2cfb9-1aee-4a00-bb4c-c5aac7ea5940", "address": "fa:16:3e:a9:e2:6e", "network": {"id": "2a90aa65-8166-481b-8708-d819bc48495e", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-869756270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "699f49af0394495bb00c4a455a1e744d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ef746c57-cd18-4883-a0e9-c52937aaf41d", "external-id": "nsx-vlan-transportzone-863", "segmentation_id": 863, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70e2cfb9-1a", "ovs_interfaceid": "70e2cfb9-1aee-4a00-bb4c-c5aac7ea5940", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 669.330077] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:a9:e2:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ef746c57-cd18-4883-a0e9-c52937aaf41d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '70e2cfb9-1aee-4a00-bb4c-c5aac7ea5940', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 669.338571] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Creating folder: Project (699f49af0394495bb00c4a455a1e744d). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 669.339326] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b8d6ec22-df99-474f-be4e-7e543644c717 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.350574] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Created folder: Project (699f49af0394495bb00c4a455a1e744d) in parent group-v623108. [ 669.350663] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Creating folder: Instances. Parent ref: group-v623150. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 669.351609] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-734212f5-e366-495b-9b4a-662a9b480ae2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.389889] env[67015]: DEBUG nova.compute.manager [req-877d3d96-8283-4703-af70-2f9ab8f560a6 req-6c64b179-592e-4f52-8b6a-7ee4837a1cde service nova] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Received event network-changed-70e2cfb9-1aee-4a00-bb4c-c5aac7ea5940 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 671.390169] env[67015]: DEBUG nova.compute.manager [req-877d3d96-8283-4703-af70-2f9ab8f560a6 req-6c64b179-592e-4f52-8b6a-7ee4837a1cde service nova] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Refreshing instance network info cache due to event network-changed-70e2cfb9-1aee-4a00-bb4c-c5aac7ea5940. 
{{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 671.390328] env[67015]: DEBUG oslo_concurrency.lockutils [req-877d3d96-8283-4703-af70-2f9ab8f560a6 req-6c64b179-592e-4f52-8b6a-7ee4837a1cde service nova] Acquiring lock "refresh_cache-aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 671.390470] env[67015]: DEBUG oslo_concurrency.lockutils [req-877d3d96-8283-4703-af70-2f9ab8f560a6 req-6c64b179-592e-4f52-8b6a-7ee4837a1cde service nova] Acquired lock "refresh_cache-aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.390640] env[67015]: DEBUG nova.network.neutron [req-877d3d96-8283-4703-af70-2f9ab8f560a6 req-6c64b179-592e-4f52-8b6a-7ee4837a1cde service nova] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Refreshing network info cache for port 70e2cfb9-1aee-4a00-bb4c-c5aac7ea5940 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 671.931705] env[67015]: DEBUG nova.network.neutron [req-877d3d96-8283-4703-af70-2f9ab8f560a6 req-6c64b179-592e-4f52-8b6a-7ee4837a1cde service nova] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Updated VIF entry in instance network info cache for port 70e2cfb9-1aee-4a00-bb4c-c5aac7ea5940. {{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 671.931705] env[67015]: DEBUG nova.network.neutron [req-877d3d96-8283-4703-af70-2f9ab8f560a6 req-6c64b179-592e-4f52-8b6a-7ee4837a1cde service nova] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Updating instance_info_cache with network_info: [{"id": "70e2cfb9-1aee-4a00-bb4c-c5aac7ea5940", "address": "fa:16:3e:a9:e2:6e", "network": {"id": "2a90aa65-8166-481b-8708-d819bc48495e", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-869756270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "699f49af0394495bb00c4a455a1e744d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ef746c57-cd18-4883-a0e9-c52937aaf41d", "external-id": "nsx-vlan-transportzone-863", "segmentation_id": 863, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70e2cfb9-1a", "ovs_interfaceid": "70e2cfb9-1aee-4a00-bb4c-c5aac7ea5940", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.941751] env[67015]: DEBUG oslo_concurrency.lockutils [req-877d3d96-8283-4703-af70-2f9ab8f560a6 req-6c64b179-592e-4f52-8b6a-7ee4837a1cde service nova] Releasing lock "refresh_cache-aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 672.291186] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Created 
folder: Instances in parent group-v623150.
[ 672.291462] env[67015]: DEBUG oslo.service.loopingcall [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 672.291654] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 672.291864] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ac644aa-0a12-4019-9a7b-9e26645d3494 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 672.312974] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 672.312974] env[67015]: value = "task-3114417"
[ 672.312974] env[67015]: _type = "Task"
[ 672.312974] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 672.321090] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114417, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 672.823406] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114417, 'name': CreateVM_Task, 'duration_secs': 0.27487} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 672.823666] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 672.824223] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 672.824388] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 672.824720] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 672.825020] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acfc16f3-010b-4158-971e-69775c0a2056 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
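The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" and "Waiting for the task ... to complete" lines are both built on the same primitive: oslo.service's FixedIntervalLoopingCall keeps calling a poll function until it raises LoopingCallDone. A hedged sketch of that loop, with a canned state sequence standing in for real CreateVM_Task polling (not the oslo.vmware code itself):

    from oslo_service import loopingcall

    # Canned task_info states standing in for what polling CreateVM_Task returns.
    _states = iter([
        {"name": "CreateVM_Task", "progress": 0},
        {"name": "CreateVM_Task", "progress": 100, "state": "success"},
    ])

    def _poll_create_vm():
        info = next(_states)
        print("Task progress is %s%%." % info["progress"])
        if info.get("state") == "success":
            # Returning a value out of the loop is done by raising LoopingCallDone.
            raise loopingcall.LoopingCallDone(info)

    timer = loopingcall.FixedIntervalLoopingCall(_poll_create_vm)
    result = timer.start(interval=0.5).wait()
    print("completed:", result["name"])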
[ 672.829271] env[67015]: DEBUG oslo_vmware.api [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Waiting for the task: (returnval){
[ 672.829271] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52dcca59-6f8c-2ca4-6cf8-38eea2180dfe"
[ 672.829271] env[67015]: _type = "Task"
[ 672.829271] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 672.837714] env[67015]: DEBUG oslo_vmware.api [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52dcca59-6f8c-2ca4-6cf8-38eea2180dfe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 673.339235] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 673.339502] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 673.339709] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 679.799890] env[67015]: DEBUG oslo_concurrency.lockutils [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Acquiring lock "96feb18e-14ee-40cf-bd5d-89a4e773c797" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 679.800230] env[67015]: DEBUG oslo_concurrency.lockutils [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Lock "96feb18e-14ee-40cf-bd5d-89a4e773c797" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 699.630979] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 699.631385] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015)
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 699.631597] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 699.631786] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 699.631974] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 699.632158] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 699.643577] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 699.643788] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.643960] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.644139] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 699.645257] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9043ced9-bd9f-4cfd-bb97-4d274d6bb307 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.655024] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcbf6346-f132-483a-91bf-9fbfb0f7ff4d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.668999] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb3c1f24-6b26-4a67-8628-3b08319e435d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.675586] env[67015]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76493ac-b1f0-4bcc-ae35-825f8d9bae66 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.707679] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181050MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 699.707833] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 699.708050] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.781989] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 860124dd-da7e-4beb-832f-7a9ab9580aed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 699.782177] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance bf738778-771f-4a1a-b83e-d786c67dafc0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 699.782312] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 3eb80b23-a4a3-43e6-9620-86bf1eb344f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 699.782438] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 699.782558] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance e0ac77e1-fb77-4b97-bacc-838cc3e16bbc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 699.782799] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 13e28171-8074-4660-91cf-f6d569414fc6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 699.782899] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 98ad3e5c-065d-4561-890c-46d5ca0a8f7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 699.782953] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 781b688b-ec99-4423-99b2-2502c6e8a75d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 699.783055] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 0734f630-dea5-4ee0-b890-dd50f3e8b178 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 699.783182] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 699.798213] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 66fa7689-aea7-4b88-b63c-0754f5e99d51 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.809831] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 3cbfca3b-863a-40d1-81ab-63794b8de97e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.820462] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance a634ad37-b9f9-40a0-b2c1-6cd8612ba274 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.830936] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 85e5e3fd-942a-4a75-98b5-221d37a51dcf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.841353] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance b10504c5-2770-45e0-bd3a-1fdc342b1397 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.851473] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 4df734f8-537a-462b-991e-472c15afeb61 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.865231] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 29c344b0-2ec3-44e6-8d33-fd988c26da7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.874632] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8cd6a13f-fae2-43ea-846e-24f4987cf2ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.887217] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance ba11c5cc-bd2b-4500-a87e-941e64630c36 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.898399] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance fd673125-b2b3-4a7b-a90d-0452b95d5db8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.910421] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 76c4c961-0d94-445e-a861-b3880ef96d98 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.919506] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 32cb4672-8ebe-472b-b4ff-48bffad679e6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.932429] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance a35ac03a-2a8a-4c79-ae3f-5afc1f563964 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.943448] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance bcef5f6b-c7b9-413c-b198-b858444a12da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.952864] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 1b24ae9d-f8bc-4c11-9b07-b4a4e435e269 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.962532] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 5c77964f-e902-489a-86c3-9c9d4dd304d3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.973412] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 76b9817f-5571-48f6-8144-08d18f635750 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.981798] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance e0eca599-b9a0-40a5-968d-21ac240f815c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.994496] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 573d1e82-a96d-47e9-89b0-efd69306ed59 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 700.003160] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 63420a94-ef64-407a-a032-61a619907b56 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 700.014278] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 96feb18e-14ee-40cf-bd5d-89a4e773c797 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 700.014278] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 700.014278] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 700.355766] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60571996-fcaa-43f3-9ff5-f708d75b6726 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 700.363535] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da43ca4-58ab-41b9-94a7-f09271bab0bf {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 700.392248] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d043953-0f7d-4303-8fb7-037c0f810873 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 700.399241] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abad974e-c319-4711-b656-8d21524d817c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 700.413705] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 700.421924] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 700.437311] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 700.437480] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.729s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
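The inventory dict logged above is what Nova's report client keeps in sync with Placement for this provider. A small sketch that rebuilds it and derives the schedulable capacity; the values are copied from the log, and the (total - reserved) * allocation_ratio rule is Placement's standard capacity calculation (treat the helper as an illustrative recomputation, not Nova code):

    # Inventory for provider 82311841-8ff3-4f49-9053-67c5a45ef771, verbatim.
    INVENTORY = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    def capacity(inv):
        # Placement schedules against (total - reserved) * allocation_ratio;
        # max_unit still caps any single allocation (16 VCPUs here).
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    # "Total usable vcpus: 48, total allocated vcpus: 10" fits comfortably in
    # the 192-VCPU pool this yields.
    print(capacity(INVENTORY))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}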
req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 701.320424] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 701.320464] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 701.346336] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 701.346517] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 701.346656] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 701.346788] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 701.346916] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 701.347203] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 701.347376] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 701.347506] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 701.347626] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Skipping network cache update for instance because it is Building. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 701.347746] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 701.347867] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 701.514243] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 701.514541] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 701.514689] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 714.206367] env[67015]: WARNING oslo_vmware.rw_handles [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 714.206367] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 714.206367] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 714.206367] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 714.206367] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 714.206367] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 714.206367] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 714.206367] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 714.206367] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 714.206367] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 714.206367] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 714.206367] env[67015]: ERROR oslo_vmware.rw_handles [ 714.206950] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/58dc533a-c740-490c-83d8-6d986511733f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 714.208336] env[67015]: DEBUG 
nova.virt.vmwareapi.vmops [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 714.208579] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Copying Virtual Disk [datastore2] vmware_temp/58dc533a-c740-490c-83d8-6d986511733f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/58dc533a-c740-490c-83d8-6d986511733f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 714.208852] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-18d999b8-720f-4705-b3ec-df2316ae858b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.217114] env[67015]: DEBUG oslo_vmware.api [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Waiting for the task: (returnval){ [ 714.217114] env[67015]: value = "task-3114418" [ 714.217114] env[67015]: _type = "Task" [ 714.217114] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.224528] env[67015]: DEBUG oslo_vmware.api [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Task: {'id': task-3114418, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.728036] env[67015]: DEBUG oslo_vmware.exceptions [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Fault InvalidArgument not matched. 
{{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 714.728036] env[67015]: DEBUG oslo_concurrency.lockutils [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 714.728036] env[67015]: ERROR nova.compute.manager [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 714.728036] env[67015]: Faults: ['InvalidArgument'] [ 714.728036] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Traceback (most recent call last): [ 714.728036] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 714.728036] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] yield resources [ 714.728036] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 714.728036] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] self.driver.spawn(context, instance, image_meta, [ 714.728941] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 714.728941] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 714.728941] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 714.728941] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] self._fetch_image_if_missing(context, vi) [ 714.728941] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 714.728941] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] image_cache(vi, tmp_image_ds_loc) [ 714.728941] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 714.728941] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] vm_util.copy_virtual_disk( [ 714.728941] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 714.728941] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] session._wait_for_task(vmdk_copy_task) [ 714.728941] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 714.728941] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] return self.wait_for_task(task_ref) [ 714.728941] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 714.729472] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] return evt.wait() [ 714.729472] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 714.729472] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] result = hub.switch() [ 714.729472] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 714.729472] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] return self.greenlet.switch() [ 714.729472] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 714.729472] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] self.f(*self.args, **self.kw) [ 714.729472] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 714.729472] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] raise exceptions.translate_fault(task_info.error) [ 714.729472] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 714.729472] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Faults: ['InvalidArgument'] [ 714.729472] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] [ 714.729750] env[67015]: INFO nova.compute.manager [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Terminating instance [ 714.729849] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.730876] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 714.730876] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f52ed1d-eebe-4f3d-848e-fd31bd371c8a {{(pid=67015) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.732442] env[67015]: DEBUG nova.compute.manager [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 714.732636] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 714.733408] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc7024c-dec4-439d-b1c8-8db398b95b36 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.740206] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 714.740427] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79520b22-4a0a-4e28-a449-73eaf9f1cf03 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.742591] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 714.742767] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 714.743743] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5fc3b12-26a3-4221-ac89-429929342282 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.748377] env[67015]: DEBUG oslo_vmware.api [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Waiting for the task: (returnval){ [ 714.748377] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52e1e518-cb39-2622-7470-bd0fb3ccfe43" [ 714.748377] env[67015]: _type = "Task" [ 714.748377] env[67015]: } to complete. 
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.755326] env[67015]: DEBUG oslo_vmware.api [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52e1e518-cb39-2622-7470-bd0fb3ccfe43, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.800299] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 714.800533] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 714.800718] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Deleting the datastore file [datastore2] bf738778-771f-4a1a-b83e-d786c67dafc0 {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 714.800984] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-af8da4e7-8cff-449e-9ab5-28efe3b2384d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.807829] env[67015]: DEBUG oslo_vmware.api [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Waiting for the task: (returnval){ [ 714.807829] env[67015]: value = "task-3114420" [ 714.807829] env[67015]: _type = "Task" [ 714.807829] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.816330] env[67015]: DEBUG oslo_vmware.api [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Task: {'id': task-3114420, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.261634] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 715.261634] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Creating directory with path [datastore2] vmware_temp/3062e8c8-d556-4dfc-a8a0-e8b268be3384/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 715.261634] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6b5c4c7-b384-4a0b-b1bd-089629152392 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.271510] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Created directory with path [datastore2] vmware_temp/3062e8c8-d556-4dfc-a8a0-e8b268be3384/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 715.271718] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Fetch image to [datastore2] vmware_temp/3062e8c8-d556-4dfc-a8a0-e8b268be3384/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 715.271889] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/3062e8c8-d556-4dfc-a8a0-e8b268be3384/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 715.272651] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e9691b-5ae1-4c8d-94de-0ee26b86ff89 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.279449] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc93ba54-b0f2-436e-9e11-49cd7a760e45 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.288512] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33cc9756-2397-4125-8be3-beced262eb37 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.322825] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6881e32a-bc36-4b48-b9b2-5b677940705b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.329854] env[67015]: DEBUG oslo_vmware.api [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Task: {'id': task-3114420, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.06869} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.331336] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 715.331545] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 715.331720] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 715.331893] env[67015]: INFO nova.compute.manager [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Took 0.60 seconds to destroy the instance on the hypervisor. 
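The delete-and-wait sequence above follows the standard oslo.vmware pattern: invoke a vSphere *_Task method, then block in wait_for_task(), which polls the task (the "progress is 0%" lines) and raises a fault translated from the task's error on failure. A minimal sketch of that pattern, assuming a placeholder vCenter endpoint and credentials (the datastore path and datacenter moref below are illustrative, not this deployment's values):

    from oslo_vmware import api, vim_util

    # Placeholder endpoint and credentials -- not this deployment's values.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # FileManager.DeleteDatastoreFile_Task mirrors the "Deleting the datastore
    # file" step logged above; the path here is hypothetical.
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task',
        session.vim.service_content.fileManager,
        name='[datastore2] bf738778-771f-4a1a-b83e-d786c67dafc0',
        datacenter=vim_util.get_moref('ha-datacenter', 'Datacenter'))

    # wait_for_task() polls roughly every task_poll_interval seconds and, on
    # task failure, raises the translated fault (as seen in the tracebacks
    # above, via exceptions.translate_fault(task_info.error)).
    session.wait_for_task(task)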
[ 715.334050] env[67015]: DEBUG nova.compute.claims [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 715.334237] env[67015]: DEBUG oslo_concurrency.lockutils [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 715.334490] env[67015]: DEBUG oslo_concurrency.lockutils [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.336968] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1ba24a22-5274-473c-91fb-111505926f5b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.359945] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 715.412997] env[67015]: DEBUG oslo_vmware.rw_handles [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3062e8c8-d556-4dfc-a8a0-e8b268be3384/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 715.474053] env[67015]: DEBUG oslo_vmware.rw_handles [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 715.474053] env[67015]: DEBUG oslo_vmware.rw_handles [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3062e8c8-d556-4dfc-a8a0-e8b268be3384/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 715.812950] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad272c3-571c-47ae-9cd3-2b5e7da6f982 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.820850] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52bb512-66ef-4e1f-b7cd-9ee2d6b31c23 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.851047] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30fa9965-aee9-4156-9856-3cba2ce3bef5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.858225] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9369c783-7915-4cad-8ce4-cfecf096580f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.872564] env[67015]: DEBUG nova.compute.provider_tree [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 715.881656] env[67015]: DEBUG nova.scheduler.client.report [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 715.896699] env[67015]: DEBUG oslo_concurrency.lockutils [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.562s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.897246] env[67015]: ERROR nova.compute.manager [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 715.897246] env[67015]: Faults: ['InvalidArgument'] [ 715.897246] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Traceback (most recent call last): [ 715.897246] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 715.897246] env[67015]: ERROR nova.compute.manager 
[instance: bf738778-771f-4a1a-b83e-d786c67dafc0] self.driver.spawn(context, instance, image_meta, [ 715.897246] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 715.897246] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 715.897246] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 715.897246] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] self._fetch_image_if_missing(context, vi) [ 715.897246] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 715.897246] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] image_cache(vi, tmp_image_ds_loc) [ 715.897246] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 715.897580] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] vm_util.copy_virtual_disk( [ 715.897580] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 715.897580] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] session._wait_for_task(vmdk_copy_task) [ 715.897580] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 715.897580] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] return self.wait_for_task(task_ref) [ 715.897580] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 715.897580] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] return evt.wait() [ 715.897580] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 715.897580] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] result = hub.switch() [ 715.897580] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 715.897580] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] return self.greenlet.switch() [ 715.897580] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 715.897580] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] self.f(*self.args, **self.kw) [ 715.897921] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 715.897921] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] raise exceptions.translate_fault(task_info.error) [ 715.897921] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 715.897921] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Faults: ['InvalidArgument'] [ 715.897921] env[67015]: ERROR nova.compute.manager [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] [ 715.898050] env[67015]: DEBUG nova.compute.utils [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 715.899430] env[67015]: DEBUG nova.compute.manager [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Build of instance bf738778-771f-4a1a-b83e-d786c67dafc0 was re-scheduled: A specified parameter was not correct: fileType [ 715.899430] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 715.899807] env[67015]: DEBUG nova.compute.manager [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 715.899978] env[67015]: DEBUG nova.compute.manager [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 715.900150] env[67015]: DEBUG nova.compute.manager [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 715.900316] env[67015]: DEBUG nova.network.neutron [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 716.587617] env[67015]: DEBUG nova.network.neutron [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.603198] env[67015]: INFO nova.compute.manager [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] Took 0.70 seconds to deallocate network for instance. [ 716.711052] env[67015]: INFO nova.scheduler.client.report [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Deleted allocations for instance bf738778-771f-4a1a-b83e-d786c67dafc0 [ 716.733798] env[67015]: DEBUG oslo_concurrency.lockutils [None req-34bf5945-30d4-41d3-a571-3304cb2767ed tempest-TenantUsagesTestJSON-1559672741 tempest-TenantUsagesTestJSON-1559672741-project-member] Lock "bf738778-771f-4a1a-b83e-d786c67dafc0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 152.747s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 716.735018] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "bf738778-771f-4a1a-b83e-d786c67dafc0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 145.927s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 716.735230] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: bf738778-771f-4a1a-b83e-d786c67dafc0] During sync_power_state the instance has a pending task (spawning). Skip. 
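The "Acquiring lock" / "acquired" / ""released"" triplets threaded through this section (with their waited/held durations) are emitted by oslo.concurrency's named in-process locks, which Nova uses to serialize resource-tracker updates and per-instance operations. A minimal sketch of the pattern, with an illustrative function rather than Nova's actual method:

    from oslo_concurrency import lockutils

    # The decorator logs the same Acquiring/acquired/released DEBUG lines seen
    # above, including wait and hold times, around each call.
    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        pass  # claim/abort bookkeeping runs with the lock held

    # The context-manager form is equivalent for ad-hoc critical sections,
    # e.g. a per-instance lock keyed on its UUID:
    with lockutils.lock('bf738778-771f-4a1a-b83e-d786c67dafc0'):
        pass  # e.g. query and sync one instance's power state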
[ 716.735398] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "bf738778-771f-4a1a-b83e-d786c67dafc0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 716.744811] env[67015]: DEBUG nova.compute.manager [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 716.796050] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.796702] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 716.798174] env[67015]: INFO nova.compute.claims [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 717.186161] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51797c46-ad8e-4243-876f-c3b0ec9e858c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.193647] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29eff6f7-0d4c-44cd-a23c-55818c681a4a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.224163] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f800cb-95b3-41be-93bb-be65bf446a1a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.231377] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a605eea-372f-4b1a-babc-4c46618acb09 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.243968] env[67015]: DEBUG nova.compute.provider_tree [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 717.252314] env[67015]: DEBUG nova.scheduler.client.report 
[None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 717.267305] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.471s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.267786] env[67015]: DEBUG nova.compute.manager [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 717.301958] env[67015]: DEBUG nova.compute.utils [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 717.302515] env[67015]: DEBUG nova.compute.manager [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 717.302734] env[67015]: DEBUG nova.network.neutron [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 717.310631] env[67015]: DEBUG nova.compute.manager [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 717.372882] env[67015]: DEBUG nova.compute.manager [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Start spawning the instance on the hypervisor. 
{{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 717.408152] env[67015]: DEBUG nova.virt.hardware [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:18:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='338060404',id=21,is_public=True,memory_mb=128,name='tempest-flavor_with_ephemeral_0-269059638',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 717.408427] env[67015]: DEBUG nova.virt.hardware [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 717.408588] env[67015]: DEBUG nova.virt.hardware [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 717.408817] env[67015]: DEBUG nova.virt.hardware [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 717.408916] env[67015]: DEBUG nova.virt.hardware [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 717.409074] env[67015]: DEBUG nova.virt.hardware [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 717.409290] env[67015]: DEBUG nova.virt.hardware [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 717.409453] env[67015]: DEBUG nova.virt.hardware [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 
tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 717.409623] env[67015]: DEBUG nova.virt.hardware [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 717.409916] env[67015]: DEBUG nova.virt.hardware [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 717.409982] env[67015]: DEBUG nova.virt.hardware [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 717.411063] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-931e699d-0cfb-4439-9573-89e5237a59cc {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.415227] env[67015]: DEBUG nova.policy [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fcd86343dd874383927bdb59fb9233f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '83e6fb8a46314fe88b1d7e8eac137b28', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 717.421757] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21c9ca64-397c-4bf6-af89-d72983ed9cf9 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.046834] env[67015]: DEBUG nova.network.neutron [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Successfully created port: 1b6db968-fcb1-496d-87a7-620bed27f24d {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 719.289063] env[67015]: DEBUG nova.network.neutron [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Successfully updated port: 1b6db968-fcb1-496d-87a7-620bed27f24d {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 719.303299] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 
tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Acquiring lock "refresh_cache-66fa7689-aea7-4b88-b63c-0754f5e99d51" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.303487] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Acquired lock "refresh_cache-66fa7689-aea7-4b88-b63c-0754f5e99d51" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.303721] env[67015]: DEBUG nova.network.neutron [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 719.384586] env[67015]: DEBUG nova.network.neutron [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 719.702133] env[67015]: DEBUG nova.network.neutron [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Updating instance_info_cache with network_info: [{"id": "1b6db968-fcb1-496d-87a7-620bed27f24d", "address": "fa:16:3e:1d:57:29", "network": {"id": "7a5d9a89-b241-4062-8e76-5b981dc5f23d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1170036161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83e6fb8a46314fe88b1d7e8eac137b28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a4d142-3f97-47fe-b074-58923c46815e", "external-id": "nsx-vlan-transportzone-565", "segmentation_id": 565, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b6db968-fc", "ovs_interfaceid": "1b6db968-fcb1-496d-87a7-620bed27f24d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.715387] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Releasing lock "refresh_cache-66fa7689-aea7-4b88-b63c-0754f5e99d51" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.715680] env[67015]: DEBUG nova.compute.manager [None 
req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Instance network_info: |[{"id": "1b6db968-fcb1-496d-87a7-620bed27f24d", "address": "fa:16:3e:1d:57:29", "network": {"id": "7a5d9a89-b241-4062-8e76-5b981dc5f23d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1170036161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83e6fb8a46314fe88b1d7e8eac137b28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a4d142-3f97-47fe-b074-58923c46815e", "external-id": "nsx-vlan-transportzone-565", "segmentation_id": 565, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b6db968-fc", "ovs_interfaceid": "1b6db968-fcb1-496d-87a7-620bed27f24d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 719.716088] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:57:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '49a4d142-3f97-47fe-b074-58923c46815e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1b6db968-fcb1-496d-87a7-620bed27f24d', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 719.724654] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Creating folder: Project (83e6fb8a46314fe88b1d7e8eac137b28). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 719.724654] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-405f9cdf-202e-4e73-9b3c-90555daf2fcc {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.736077] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Created folder: Project (83e6fb8a46314fe88b1d7e8eac137b28) in parent group-v623108. [ 719.736288] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Creating folder: Instances. Parent ref: group-v623153. 
{{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 719.736521] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ff7ae88a-309a-4dc3-aba3-aec2f8589f25 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.745363] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Created folder: Instances in parent group-v623153. [ 719.745587] env[67015]: DEBUG oslo.service.loopingcall [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 719.746053] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 719.746053] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-56434909-0b77-4ebd-99ca-d7daef5507c6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.765183] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 719.765183] env[67015]: value = "task-3114423" [ 719.765183] env[67015]: _type = "Task" [ 719.765183] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.772429] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114423, 'name': CreateVM_Task} progress is 0%. 
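
The CreateVM_Task records above show the usual oslo.vmware pattern: the SOAP call returns a task reference immediately, and wait_for_task then polls it until it reaches a terminal state. A minimal sketch of that loop, assuming an illustrative get_task_info() helper that reads the task's info property; the real implementation drives the poll with a looping call and translates VIM faults rather than raising RuntimeError:

    import time

    def wait_for_task(task_ref, poll_interval=0.5):
        # Poll the vSphere task until it succeeds or fails.
        while True:
            info = get_task_info(task_ref)      # assumed helper (PropertyCollector read)
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                raise RuntimeError(info.error)  # oslo.vmware raises a translated fault here
            time.sleep(poll_interval)
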
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.784612] env[67015]: DEBUG nova.compute.manager [req-4c315e99-04ae-41bb-ac68-24ea48559042 req-d127f5b6-bd2a-4641-9e56-f8d834afc100 service nova] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Received event network-vif-plugged-1b6db968-fcb1-496d-87a7-620bed27f24d {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 719.784897] env[67015]: DEBUG oslo_concurrency.lockutils [req-4c315e99-04ae-41bb-ac68-24ea48559042 req-d127f5b6-bd2a-4641-9e56-f8d834afc100 service nova] Acquiring lock "66fa7689-aea7-4b88-b63c-0754f5e99d51-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.785136] env[67015]: DEBUG oslo_concurrency.lockutils [req-4c315e99-04ae-41bb-ac68-24ea48559042 req-d127f5b6-bd2a-4641-9e56-f8d834afc100 service nova] Lock "66fa7689-aea7-4b88-b63c-0754f5e99d51-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.785300] env[67015]: DEBUG oslo_concurrency.lockutils [req-4c315e99-04ae-41bb-ac68-24ea48559042 req-d127f5b6-bd2a-4641-9e56-f8d834afc100 service nova] Lock "66fa7689-aea7-4b88-b63c-0754f5e99d51-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 719.785464] env[67015]: DEBUG nova.compute.manager [req-4c315e99-04ae-41bb-ac68-24ea48559042 req-d127f5b6-bd2a-4641-9e56-f8d834afc100 service nova] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] No waiting events found dispatching network-vif-plugged-1b6db968-fcb1-496d-87a7-620bed27f24d {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 719.785694] env[67015]: WARNING nova.compute.manager [req-4c315e99-04ae-41bb-ac68-24ea48559042 req-d127f5b6-bd2a-4641-9e56-f8d834afc100 service nova] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Received unexpected event network-vif-plugged-1b6db968-fcb1-496d-87a7-620bed27f24d for instance with vm_state building and task_state spawning. [ 720.274656] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114423, 'name': CreateVM_Task, 'duration_secs': 0.2972} completed successfully. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.274835] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 720.275516] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 720.275681] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.275997] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 720.276260] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42fd0e1a-3641-495e-b483-622a33162f28 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.280621] env[67015]: DEBUG oslo_vmware.api [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Waiting for the task: (returnval){ [ 720.280621] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]527291b0-6071-e95a-c6f4-091be87dbbe8" [ 720.280621] env[67015]: _type = "Task" [ 720.280621] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.288524] env[67015]: DEBUG oslo_vmware.api [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]527291b0-6071-e95a-c6f4-091be87dbbe8, 'name': SearchDatastore_Task} progress is 0%. 
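
The lock and external-semaphore lines above serialize access to the shared datastore image cache so that only one request fetches a given image at a time. A minimal sketch of that oslo.concurrency pattern, assuming an illustrative do_fetch() helper; the lock name mirrors the cache path shown in the log:

    from oslo_concurrency import lockutils

    def fetch_image_once(image_id):
        # external=True takes an inter-process file lock, not just a thread lock.
        with lockutils.lock('devstack-image-cache_base/%s' % image_id, external=True):
            do_fetch(image_id)  # assumed helper; runs for at most one caller at a time
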
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.791020] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.791372] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 720.792092] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.946181] env[67015]: DEBUG nova.compute.manager [req-b1f70a82-065c-4bdf-b476-208d459e690b req-854141f0-7c58-4ac3-a902-f296d52795c7 service nova] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Received event network-changed-1b6db968-fcb1-496d-87a7-620bed27f24d {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 721.946432] env[67015]: DEBUG nova.compute.manager [req-b1f70a82-065c-4bdf-b476-208d459e690b req-854141f0-7c58-4ac3-a902-f296d52795c7 service nova] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Refreshing instance network info cache due to event network-changed-1b6db968-fcb1-496d-87a7-620bed27f24d. {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 721.946562] env[67015]: DEBUG oslo_concurrency.lockutils [req-b1f70a82-065c-4bdf-b476-208d459e690b req-854141f0-7c58-4ac3-a902-f296d52795c7 service nova] Acquiring lock "refresh_cache-66fa7689-aea7-4b88-b63c-0754f5e99d51" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.946730] env[67015]: DEBUG oslo_concurrency.lockutils [req-b1f70a82-065c-4bdf-b476-208d459e690b req-854141f0-7c58-4ac3-a902-f296d52795c7 service nova] Acquired lock "refresh_cache-66fa7689-aea7-4b88-b63c-0754f5e99d51" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.946932] env[67015]: DEBUG nova.network.neutron [req-b1f70a82-065c-4bdf-b476-208d459e690b req-854141f0-7c58-4ac3-a902-f296d52795c7 service nova] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Refreshing network info cache for port 1b6db968-fcb1-496d-87a7-620bed27f24d {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 722.403332] env[67015]: DEBUG nova.network.neutron [req-b1f70a82-065c-4bdf-b476-208d459e690b req-854141f0-7c58-4ac3-a902-f296d52795c7 service nova] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Updated VIF entry in instance network info cache for port 1b6db968-fcb1-496d-87a7-620bed27f24d. 
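
The cached network_info is a list of VIF dicts like the one dumped above and refreshed again below. A small illustrative helper for pulling the fixed IPv4 addresses out of one entry, with the structure taken directly from those log lines:

    def fixed_ips(vif):
        return [ip['address']
                for subnet in vif['network']['subnets']
                for ip in subnet['ips']
                if ip['type'] == 'fixed']

    # For port 1b6db968-fcb1-496d-87a7-620bed27f24d this yields ['192.168.128.10'].
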
{{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 722.403802] env[67015]: DEBUG nova.network.neutron [req-b1f70a82-065c-4bdf-b476-208d459e690b req-854141f0-7c58-4ac3-a902-f296d52795c7 service nova] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Updating instance_info_cache with network_info: [{"id": "1b6db968-fcb1-496d-87a7-620bed27f24d", "address": "fa:16:3e:1d:57:29", "network": {"id": "7a5d9a89-b241-4062-8e76-5b981dc5f23d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1170036161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83e6fb8a46314fe88b1d7e8eac137b28", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a4d142-3f97-47fe-b074-58923c46815e", "external-id": "nsx-vlan-transportzone-565", "segmentation_id": 565, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b6db968-fc", "ovs_interfaceid": "1b6db968-fcb1-496d-87a7-620bed27f24d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.413510] env[67015]: DEBUG oslo_concurrency.lockutils [req-b1f70a82-065c-4bdf-b476-208d459e690b req-854141f0-7c58-4ac3-a902-f296d52795c7 service nova] Releasing lock "refresh_cache-66fa7689-aea7-4b88-b63c-0754f5e99d51" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.177698] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Acquiring lock "30f91210-0318-4912-808b-843c2cd04ea1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 728.177967] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Lock "30f91210-0318-4912-808b-843c2cd04ea1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.362780] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6b5c1439-b9f5-4d91-9feb-03c1bd36fe15 tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Acquiring lock "860124dd-da7e-4beb-832f-7a9ab9580aed" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.509903] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 759.513601] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 759.513979] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 760.509785] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 760.534827] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 760.536945] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 760.536945] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 760.557329] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 760.557545] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 760.557715] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 760.557913] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 760.558044] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Skipping network cache update for instance because it is Building. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 760.558213] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 760.558383] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 760.558538] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 760.558690] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 760.558841] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 760.558995] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 760.559492] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 761.514336] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 761.514574] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 761.514749] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 761.514937] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... 
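
Each "Running periodic task ComputeManager._*" line comes from oslo.service's periodic-task machinery, which collects decorated methods and fires each one on its spacing. A minimal sketch of how such tasks are declared, assuming an illustrative 60-second spacing; Nova's real intervals are driven by configuration:

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60)
        def _poll_something(self, context):
            pass  # fires roughly every 60s once run_periodic_tasks() is driven by a timer
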
{{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 761.515115] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 761.529922] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.530162] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.530331] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.530486] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 761.532043] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd13264-7f20-408b-9eb4-ef6b485d18a6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.540676] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c153d28-3f3a-44e0-9d5c-6bff5f8dfb15 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.554485] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be34191-0ad5-4936-a86a-19e9d75fbe7c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.560731] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e9b476f-e67f-41d0-bf5f-7268d93a557e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.588774] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181056MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 761.588929] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.590258] 
env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.683807] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 860124dd-da7e-4beb-832f-7a9ab9580aed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 761.684011] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 3eb80b23-a4a3-43e6-9620-86bf1eb344f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 761.684148] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 761.684274] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance e0ac77e1-fb77-4b97-bacc-838cc3e16bbc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 761.684395] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 13e28171-8074-4660-91cf-f6d569414fc6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 761.684517] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 98ad3e5c-065d-4561-890c-46d5ca0a8f7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 761.684637] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 781b688b-ec99-4423-99b2-2502c6e8a75d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 761.684758] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 0734f630-dea5-4ee0-b890-dd50f3e8b178 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 761.684899] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 761.685037] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 66fa7689-aea7-4b88-b63c-0754f5e99d51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 761.697179] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 3cbfca3b-863a-40d1-81ab-63794b8de97e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 761.708875] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance a634ad37-b9f9-40a0-b2c1-6cd8612ba274 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 761.720103] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 85e5e3fd-942a-4a75-98b5-221d37a51dcf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 761.730534] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance b10504c5-2770-45e0-bd3a-1fdc342b1397 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 761.740526] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 4df734f8-537a-462b-991e-472c15afeb61 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 761.751241] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 29c344b0-2ec3-44e6-8d33-fd988c26da7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 761.761872] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8cd6a13f-fae2-43ea-846e-24f4987cf2ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 761.772164] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance ba11c5cc-bd2b-4500-a87e-941e64630c36 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 761.784392] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance fd673125-b2b3-4a7b-a90d-0452b95d5db8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 761.793682] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 76c4c961-0d94-445e-a861-b3880ef96d98 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 761.804019] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 32cb4672-8ebe-472b-b4ff-48bffad679e6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 761.813832] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance a35ac03a-2a8a-4c79-ae3f-5afc1f563964 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 761.823725] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance bcef5f6b-c7b9-413c-b198-b858444a12da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 761.832958] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 1b24ae9d-f8bc-4c11-9b07-b4a4e435e269 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 761.843048] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 5c77964f-e902-489a-86c3-9c9d4dd304d3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 761.854094] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 76b9817f-5571-48f6-8144-08d18f635750 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 761.864166] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance e0eca599-b9a0-40a5-968d-21ac240f815c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 761.874908] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 573d1e82-a96d-47e9-89b0-efd69306ed59 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 761.884923] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 63420a94-ef64-407a-a032-61a619907b56 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
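
Each of the ten actively managed instances above holds a placement allocation of {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}, and those allocations account exactly for the usage totals reported just below, assuming the scheduled-but-not-started instances are excluded from usage and used_ram includes the inventory's 512MB reservation:

    # Cross-check of the final resource view (used_ram=1792MB, used_disk=10GB,
    # used_vcpus=10) against the per-instance allocations listed in the log.
    active = 10
    per_instance = {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}
    reserved_mb = 512

    assert active * per_instance['VCPU'] == 10
    assert active * per_instance['MEMORY_MB'] + reserved_mb == 1792
    assert active * per_instance['DISK_GB'] == 10
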
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 761.894821] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 96feb18e-14ee-40cf-bd5d-89a4e773c797 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 761.904316] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 30f91210-0318-4912-808b-843c2cd04ea1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 761.904559] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 761.904714] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 762.247585] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4beec84-08a2-4825-818a-f93a2a462110 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.255321] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc122ab-3405-4a51-ba2a-7a5b4bb2b726 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.285571] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5609d588-f930-428a-b892-8e77e115759e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.292494] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-040c9ff9-5f8a-4698-8cb0-12fc1b74d37a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.305491] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 762.313993] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 762.328371] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 762.328546] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.739s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.223488] env[67015]: WARNING oslo_vmware.rw_handles [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 764.223488] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 764.223488] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 764.223488] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 764.223488] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 764.223488] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 764.223488] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 764.223488] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 764.223488] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 764.223488] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 764.223488] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 764.223488] env[67015]: ERROR oslo_vmware.rw_handles [ 764.224153] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/3062e8c8-d556-4dfc-a8a0-e8b268be3384/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 764.225904] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 764.226237] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 
tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Copying Virtual Disk [datastore2] vmware_temp/3062e8c8-d556-4dfc-a8a0-e8b268be3384/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/3062e8c8-d556-4dfc-a8a0-e8b268be3384/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 764.226566] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-025cb888-0bbf-4730-9c72-e46bdfd47cd7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.234994] env[67015]: DEBUG oslo_vmware.api [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Waiting for the task: (returnval){ [ 764.234994] env[67015]: value = "task-3114424" [ 764.234994] env[67015]: _type = "Task" [ 764.234994] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.242066] env[67015]: DEBUG oslo_vmware.api [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Task: {'id': task-3114424, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.744726] env[67015]: DEBUG oslo_vmware.exceptions [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Fault InvalidArgument not matched. 
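
"Fault InvalidArgument not matched" means get_fault_class found no dedicated exception type for this fault, so the CopyVirtualDisk_Task failure surfaces as a generic VimFaultException, as the traceback below shows. An illustrative way for a caller to discriminate on the fault list, assuming session and copy_task objects from the surrounding code and a hypothetical handle_invalid_argument() helper:

    from oslo_vmware import exceptions as vexc

    try:
        session.wait_for_task(copy_task)
    except vexc.VimFaultException as e:
        if 'InvalidArgument' in e.fault_list:
            handle_invalid_argument(e)  # assumed helper for the fileType fault seen below
        else:
            raise
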
{{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 764.745044] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.745608] env[67015]: ERROR nova.compute.manager [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 764.745608] env[67015]: Faults: ['InvalidArgument'] [ 764.745608] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Traceback (most recent call last): [ 764.745608] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 764.745608] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] yield resources [ 764.745608] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 764.745608] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] self.driver.spawn(context, instance, image_meta, [ 764.745608] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 764.745608] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] self._vmops.spawn(context, instance, image_meta, injected_files, [ 764.745608] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 764.745608] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] self._fetch_image_if_missing(context, vi) [ 764.745608] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 764.745941] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] image_cache(vi, tmp_image_ds_loc) [ 764.745941] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 764.745941] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] vm_util.copy_virtual_disk( [ 764.745941] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 764.745941] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] session._wait_for_task(vmdk_copy_task) [ 764.745941] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 764.745941] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] return self.wait_for_task(task_ref) [ 764.745941] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 764.745941] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] return evt.wait() [ 764.745941] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 764.745941] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] result = hub.switch() [ 764.745941] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 764.745941] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] return self.greenlet.switch() [ 764.746276] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 764.746276] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] self.f(*self.args, **self.kw) [ 764.746276] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 764.746276] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] raise exceptions.translate_fault(task_info.error) [ 764.746276] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 764.746276] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Faults: ['InvalidArgument'] [ 764.746276] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] [ 764.746276] env[67015]: INFO nova.compute.manager [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Terminating instance [ 764.747518] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.748092] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 764.748092] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b180201b-d6db-458d-8c7f-12ff3cd01629 {{(pid=67015) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.750256] env[67015]: DEBUG nova.compute.manager [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 764.750418] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 764.751155] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9717837e-9284-49b7-8245-bd24943ea80e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.757941] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 764.758195] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-912ba47a-3f4a-4a84-886b-e632952c6e17 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.760365] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 764.760539] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 764.761464] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-196eb1ab-c9f8-48a8-9fe2-b5300dfebc27 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.766307] env[67015]: DEBUG oslo_vmware.api [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Waiting for the task: (returnval){ [ 764.766307] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52863d8a-02de-24de-694c-7887f84f647e" [ 764.766307] env[67015]: _type = "Task" [ 764.766307] env[67015]: } to complete. 
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.779693] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 764.779916] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Creating directory with path [datastore2] vmware_temp/9298a8c6-550d-4b1d-a28b-6bcc25d91c48/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 764.780141] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9090a86c-5e4f-42a9-86db-5512ec025333 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.800134] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Created directory with path [datastore2] vmware_temp/9298a8c6-550d-4b1d-a28b-6bcc25d91c48/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 764.800360] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Fetch image to [datastore2] vmware_temp/9298a8c6-550d-4b1d-a28b-6bcc25d91c48/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 764.800531] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/9298a8c6-550d-4b1d-a28b-6bcc25d91c48/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 764.801336] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1824829a-d16e-41a5-bdbe-9f4988ed1797 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.808228] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c80680-97bd-4075-abec-9cc1fccd2584 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.816973] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c00a62aa-eb0b-403f-bfe0-5ab43528c64e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.851110] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14c7a10d-cd0a-4d0c-a85c-ea9385d59f9c {{(pid=67015) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.857757] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5f1f7a67-550f-44e3-bdeb-3bea695f3ced {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.874051] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 764.874298] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 764.874483] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Deleting the datastore file [datastore2] 860124dd-da7e-4beb-832f-7a9ab9580aed {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 764.874790] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-37a63681-35d0-4a95-b83d-36a448c98a91 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.878952] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 764.882228] env[67015]: DEBUG oslo_vmware.api [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Waiting for the task: (returnval){ [ 764.882228] env[67015]: value = "task-3114426" [ 764.882228] env[67015]: _type = "Task" [ 764.882228] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.889773] env[67015]: DEBUG oslo_vmware.api [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Task: {'id': task-3114426, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.932994] env[67015]: DEBUG oslo_vmware.rw_handles [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9298a8c6-550d-4b1d-a28b-6bcc25d91c48/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 764.998420] env[67015]: DEBUG oslo_vmware.rw_handles [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 764.998609] env[67015]: DEBUG oslo_vmware.rw_handles [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9298a8c6-550d-4b1d-a28b-6bcc25d91c48/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 765.005164] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3b810683-3fe7-475d-b396-3ad4d7b044b5 tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Acquiring lock "3eb80b23-a4a3-43e6-9620-86bf1eb344f2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.108955] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6d6b3c99-f84e-4ddb-bbaa-ae29366a4405 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Acquiring lock "1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.392490] env[67015]: DEBUG oslo_vmware.api [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Task: {'id': task-3114426, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.085263} completed successfully. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.392799] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 765.392935] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 765.393131] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 765.393310] env[67015]: INFO nova.compute.manager [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Took 0.64 seconds to destroy the instance on the hypervisor. [ 765.395515] env[67015]: DEBUG nova.compute.claims [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 765.395690] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.395902] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 765.799097] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f787e6f0-02c7-43a7-b8b5-7f0f82ad2315 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.806378] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01600653-ac6d-4a3f-88b9-cf34f4d1b28e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.837042] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6031abdd-90ba-464f-b73c-7215539e846b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
765.843796] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca08cfc-58ae-4a50-a269-da21e6966055 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.856771] env[67015]: DEBUG nova.compute.provider_tree [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 765.866472] env[67015]: DEBUG nova.scheduler.client.report [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 765.880630] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.485s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.881166] env[67015]: ERROR nova.compute.manager [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 765.881166] env[67015]: Faults: ['InvalidArgument'] [ 765.881166] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Traceback (most recent call last): [ 765.881166] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 765.881166] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] self.driver.spawn(context, instance, image_meta, [ 765.881166] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 765.881166] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] self._vmops.spawn(context, instance, image_meta, injected_files, [ 765.881166] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 765.881166] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] self._fetch_image_if_missing(context, vi) [ 765.881166] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] 
File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 765.881166] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] image_cache(vi, tmp_image_ds_loc) [ 765.881166] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 765.881502] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] vm_util.copy_virtual_disk( [ 765.881502] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 765.881502] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] session._wait_for_task(vmdk_copy_task) [ 765.881502] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 765.881502] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] return self.wait_for_task(task_ref) [ 765.881502] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 765.881502] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] return evt.wait() [ 765.881502] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 765.881502] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] result = hub.switch() [ 765.881502] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 765.881502] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] return self.greenlet.switch() [ 765.881502] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 765.881502] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] self.f(*self.args, **self.kw) [ 765.881825] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 765.881825] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] raise exceptions.translate_fault(task_info.error) [ 765.881825] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 765.881825] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Faults: ['InvalidArgument'] [ 765.881825] env[67015]: ERROR nova.compute.manager [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] [ 765.881947] env[67015]: DEBUG nova.compute.utils [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 
860124dd-da7e-4beb-832f-7a9ab9580aed] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 765.883607] env[67015]: DEBUG nova.compute.manager [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Build of instance 860124dd-da7e-4beb-832f-7a9ab9580aed was re-scheduled: A specified parameter was not correct: fileType [ 765.883607] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 765.884014] env[67015]: DEBUG nova.compute.manager [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 765.884201] env[67015]: DEBUG nova.compute.manager [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 765.884357] env[67015]: DEBUG nova.compute.manager [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 765.884527] env[67015]: DEBUG nova.network.neutron [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 766.104927] env[67015]: DEBUG oslo_concurrency.lockutils [None req-39c63962-4949-4572-92ba-704594b8a39c tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquiring lock "e0ac77e1-fb77-4b97-bacc-838cc3e16bbc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.364496] env[67015]: DEBUG nova.network.neutron [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.377718] env[67015]: INFO nova.compute.manager [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Took 0.49 seconds to deallocate network for instance. 
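The spawn failure traced above arrives as the generic oslo_vmware.exceptions.VimFaultException because vCenter's 'InvalidArgument' fault has no dedicated exception class — that is what the earlier "Fault InvalidArgument not matched" line from get_fault_class records — so wait_for_task re-raises the translated fault to the caller in nova.virt.vmwareapi.vm_util.copy_virtual_disk. A minimal sketch of that polling-and-translation contract follows; it assumes a reachable vCenter, and the host, credentials, and the wait_for_copy helper are illustrative, not taken from this log:

    from oslo_vmware import api
    from oslo_vmware import exceptions as vexc

    def wait_for_copy(session, task_ref):
        # wait_for_task() polls the vSphere task (the "progress is 0%"
        # lines above) and raises the translated fault on error.
        try:
            return session.wait_for_task(task_ref)
        except vexc.VimFaultException as e:
            # The "Faults: ['InvalidArgument']" in the traceback above
            # is this attribute; callers without a dedicated exception
            # class match on the fault name rather than on a type.
            print(e.fault_list)
            raise

    # Illustrative session setup (values are placeholders):
    session = api.VMwareAPISession(
        'vc.example.test',
        server_username='user',
        server_password='secret',
        api_retry_count=3,
        task_poll_interval=0.5)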
[ 766.475579] env[67015]: INFO nova.scheduler.client.report [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Deleted allocations for instance 860124dd-da7e-4beb-832f-7a9ab9580aed [ 766.500237] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0803302a-3100-439c-a555-934b7db8362c tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Lock "860124dd-da7e-4beb-832f-7a9ab9580aed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.857s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.501358] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "860124dd-da7e-4beb-832f-7a9ab9580aed" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 195.694s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.501540] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] During sync_power_state the instance has a pending task (spawning). Skip. [ 766.502143] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "860124dd-da7e-4beb-832f-7a9ab9580aed" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.502385] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6b5c1439-b9f5-4d91-9feb-03c1bd36fe15 tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Lock "860124dd-da7e-4beb-832f-7a9ab9580aed" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 9.140s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.502611] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6b5c1439-b9f5-4d91-9feb-03c1bd36fe15 tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Acquiring lock "860124dd-da7e-4beb-832f-7a9ab9580aed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.502804] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6b5c1439-b9f5-4d91-9feb-03c1bd36fe15 tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Lock "860124dd-da7e-4beb-832f-7a9ab9580aed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.502964] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6b5c1439-b9f5-4d91-9feb-03c1bd36fe15 tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Lock "860124dd-da7e-4beb-832f-7a9ab9580aed-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.504891] env[67015]: INFO nova.compute.manager [None req-6b5c1439-b9f5-4d91-9feb-03c1bd36fe15 tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Terminating instance [ 766.507101] env[67015]: DEBUG nova.compute.manager [None req-6b5c1439-b9f5-4d91-9feb-03c1bd36fe15 tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 766.509157] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-6b5c1439-b9f5-4d91-9feb-03c1bd36fe15 tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 766.509157] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0d12cacc-a69b-4485-b162-4eb8b7f9d3e5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.521851] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98928288-63d5-4ab5-9529-d8da6c145910 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.541474] env[67015]: DEBUG nova.compute.manager [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 766.553428] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-6b5c1439-b9f5-4d91-9feb-03c1bd36fe15 tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 860124dd-da7e-4beb-832f-7a9ab9580aed could not be found. [ 766.553624] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-6b5c1439-b9f5-4d91-9feb-03c1bd36fe15 tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 766.553806] env[67015]: INFO nova.compute.manager [None req-6b5c1439-b9f5-4d91-9feb-03c1bd36fe15 tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Took 0.05 seconds to destroy the instance on the hypervisor. [ 766.554092] env[67015]: DEBUG oslo.service.loopingcall [None req-6b5c1439-b9f5-4d91-9feb-03c1bd36fe15 tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 766.554549] env[67015]: DEBUG nova.compute.manager [-] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 766.554614] env[67015]: DEBUG nova.network.neutron [-] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 766.602599] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.602961] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.604731] env[67015]: INFO nova.compute.claims [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 766.625581] env[67015]: DEBUG nova.network.neutron [-] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.635629] env[67015]: INFO nova.compute.manager [-] [instance: 860124dd-da7e-4beb-832f-7a9ab9580aed] Took 0.08 seconds to deallocate network for instance. 
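The "Acquiring lock ... / acquired ... waited N s / released ... held N s" triplets that dominate this stretch are emitted by oslo.concurrency's lockutils wrapper — the inner function named in each line — which times how long a caller waited for and then held a named in-process semaphore. A minimal sketch of both forms seen here; the function body and lock names are illustrative:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(instance_uuid):
        # Runs with the 'compute_resources' semaphore held; the
        # decorator's inner() wrapper logs the Acquiring / acquired
        # (waited) / released (held) lines shown above.
        pass

    # Equivalent context-manager form, as used for the per-image
    # datastore cache locks in this log (placeholder lock name):
    with lockutils.lock('[datastore2] devstack-image-cache_base/<image-id>.vmdk'):
        pass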
[ 766.760157] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6b5c1439-b9f5-4d91-9feb-03c1bd36fe15 tempest-ServerDiagnosticsNegativeTest-714853114 tempest-ServerDiagnosticsNegativeTest-714853114-project-member] Lock "860124dd-da7e-4beb-832f-7a9ab9580aed" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.258s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.045259] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8357cf7f-d867-4752-93d4-3981280db2ff {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.053674] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5f363cd-6455-43c1-afd4-798839a0967e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.093479] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a01a49-92d2-45e3-9255-951ac33f1e8f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.101159] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1589a3d8-ae11-4337-9f51-59e04d4f633c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.115366] env[67015]: DEBUG nova.compute.provider_tree [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 767.125896] env[67015]: DEBUG nova.scheduler.client.report [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 767.144687] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.542s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.145488] env[67015]: DEBUG nova.compute.manager [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Start building networks asynchronously for instance. 
{{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 767.183549] env[67015]: DEBUG nova.compute.utils [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 767.185549] env[67015]: DEBUG nova.compute.manager [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Not allocating networking since 'none' was specified. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 767.196143] env[67015]: DEBUG nova.compute.manager [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 767.266479] env[67015]: DEBUG nova.compute.manager [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Start spawning the instance on the hypervisor. {{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 767.295609] env[67015]: DEBUG nova.virt.hardware [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 767.295868] env[67015]: DEBUG nova.virt.hardware [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 767.296060] env[67015]: DEBUG nova.virt.hardware [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 767.296259] env[67015]: DEBUG nova.virt.hardware [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 767.296406] env[67015]: DEBUG nova.virt.hardware [None 
req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 767.296553] env[67015]: DEBUG nova.virt.hardware [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 767.296761] env[67015]: DEBUG nova.virt.hardware [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 767.296918] env[67015]: DEBUG nova.virt.hardware [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 767.297285] env[67015]: DEBUG nova.virt.hardware [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 767.297489] env[67015]: DEBUG nova.virt.hardware [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 767.297841] env[67015]: DEBUG nova.virt.hardware [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 767.298528] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d17ac236-e65c-4e48-8072-81206159f22c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.308360] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae93989f-c4cc-464e-bb2c-ffb02b4b368a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.319794] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Instance VIF info [] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 767.325195] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Creating folder: Project (930d7fa36da24c2187dc958a9d15419a). Parent ref: group-v623108. 
{{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 767.325464] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b0a8c038-37ac-423d-9846-79f6d3cd1a62 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.335559] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Created folder: Project (930d7fa36da24c2187dc958a9d15419a) in parent group-v623108. [ 767.335742] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Creating folder: Instances. Parent ref: group-v623156. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 767.336009] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b6a230ff-19b8-45ac-8c84-6c8cd948ecf2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.344737] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Created folder: Instances in parent group-v623156. [ 767.344930] env[67015]: DEBUG oslo.service.loopingcall [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 767.345128] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 767.345315] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cdfa5873-88ec-4000-acce-4a65aba325ff {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.361799] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 767.361799] env[67015]: value = "task-3114429" [ 767.361799] env[67015]: _type = "Task" [ 767.361799] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.369167] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114429, 'name': CreateVM_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.533375] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6a036bcc-32cc-46ad-8a2e-3a42b3ee4e6e tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquiring lock "781b688b-ec99-4423-99b2-2502c6e8a75d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.607610] env[67015]: DEBUG oslo_concurrency.lockutils [None req-70a69cce-9e5e-4bba-a606-0a5fa685fe06 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Acquiring lock "13e28171-8074-4660-91cf-f6d569414fc6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.825523] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6daea0da-c076-4553-8038-572c7b9641ac tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Acquiring lock "0734f630-dea5-4ee0-b890-dd50f3e8b178" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.872720] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114429, 'name': CreateVM_Task, 'duration_secs': 0.258688} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.872902] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 767.874641] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 767.874972] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.875273] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 767.878223] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a85c383d-6ea7-4e0f-9073-a180d494121c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.883683] env[67015]: DEBUG oslo_vmware.api [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 
tempest-ServerShowV247Test-2049672408-project-member] Waiting for the task: (returnval){ [ 767.883683] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52acd38b-a97e-77b3-a872-8c5e5e798c2e" [ 767.883683] env[67015]: _type = "Task" [ 767.883683] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.888719] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9bc756d2-cb21-4f35-98f7-dc8a8c9e1e53 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Acquiring lock "98ad3e5c-065d-4561-890c-46d5ca0a8f7f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.892472] env[67015]: DEBUG oslo_vmware.api [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52acd38b-a97e-77b3-a872-8c5e5e798c2e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.393081] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 768.393347] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 768.393556] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.180362] env[67015]: DEBUG oslo_concurrency.lockutils [None req-eaf55957-7eee-425a-a3b4-2aa499d272d9 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Acquiring lock "aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.805560] env[67015]: DEBUG oslo_concurrency.lockutils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Acquiring lock "843278e1-7d76-4f50-8170-9e335d29326e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.805827] env[67015]: DEBUG oslo_concurrency.lockutils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 
tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Lock "843278e1-7d76-4f50-8170-9e335d29326e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.836401] env[67015]: DEBUG oslo_concurrency.lockutils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Acquiring lock "4a1f7297-67b0-4c57-8c11-101877c27e48" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.836713] env[67015]: DEBUG oslo_concurrency.lockutils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Lock "4a1f7297-67b0-4c57-8c11-101877c27e48" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.865926] env[67015]: DEBUG oslo_concurrency.lockutils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Acquiring lock "0182f659-5d01-4d6f-8242-aaec4efae151" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.866179] env[67015]: DEBUG oslo_concurrency.lockutils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Lock "0182f659-5d01-4d6f-8242-aaec4efae151" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.217273] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3ae69a9a-a397-47a3-b787-5d7c54f3c624 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Acquiring lock "66fa7689-aea7-4b88-b63c-0754f5e99d51" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.464559] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7a26bfe-85a6-48b4-bdac-bdc1485a7944 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Acquiring lock "3cbfca3b-863a-40d1-81ab-63794b8de97e" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.676625] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e3c847e3-32b3-47f2-a757-389778c000d8 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Acquiring lock "00412418-9727-4ed0-b4ff-92981ddab7ce" by
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.676844] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e3c847e3-32b3-47f2-a757-389778c000d8 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Lock "00412418-9727-4ed0-b4ff-92981ddab7ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.535336] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6235af37-c6d1-45f7-94a7-3fe3089b0460 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "b43ae574-1083-42fd-b9aa-d1bf6a2bad2a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.535336] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6235af37-c6d1-45f7-94a7-3fe3089b0460 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "b43ae574-1083-42fd-b9aa-d1bf6a2bad2a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.931434] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7044fae6-3465-48b3-aace-86f22cb9eb3d tempest-ServersAaction247Test-1091897607 tempest-ServersAaction247Test-1091897607-project-member] Acquiring lock "6a333c84-7e40-4ba0-b60b-a5972720e306" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.931765] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7044fae6-3465-48b3-aace-86f22cb9eb3d tempest-ServersAaction247Test-1091897607 tempest-ServersAaction247Test-1091897607-project-member] Lock "6a333c84-7e40-4ba0-b60b-a5972720e306" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.658512] env[67015]: DEBUG oslo_concurrency.lockutils [None req-4848b5f0-a200-415b-8bbe-1975fb586126 tempest-VolumesAdminNegativeTest-1180113282 tempest-VolumesAdminNegativeTest-1180113282-project-member] Acquiring lock "5f04238c-701e-4ea3-9dde-769ec26a4462" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.658512] env[67015]: DEBUG oslo_concurrency.lockutils [None req-4848b5f0-a200-415b-8bbe-1975fb586126 tempest-VolumesAdminNegativeTest-1180113282 tempest-VolumesAdminNegativeTest-1180113282-project-member] Lock "5f04238c-701e-4ea3-9dde-769ec26a4462" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.322412] env[67015]: DEBUG 
oslo_concurrency.lockutils [None req-07ab390e-63fb-425c-91cb-773279d29100 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "22f73210-0a16-4aa6-bc1d-d6625a6e4243" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.322833] env[67015]: DEBUG oslo_concurrency.lockutils [None req-07ab390e-63fb-425c-91cb-773279d29100 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "22f73210-0a16-4aa6-bc1d-d6625a6e4243" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.070612] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e5000e44-c6be-469c-a6ea-71f26af01d78 tempest-ServerTagsTestJSON-316550150 tempest-ServerTagsTestJSON-316550150-project-member] Acquiring lock "09674d48-8f73-40f7-8ff3-3d4198a053fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.070894] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e5000e44-c6be-469c-a6ea-71f26af01d78 tempest-ServerTagsTestJSON-316550150 tempest-ServerTagsTestJSON-316550150-project-member] Lock "09674d48-8f73-40f7-8ff3-3d4198a053fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.502397] env[67015]: DEBUG oslo_concurrency.lockutils [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Acquiring lock "db3de804-63b7-4887-b752-282e70e0f20e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.502397] env[67015]: DEBUG oslo_concurrency.lockutils [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Lock "db3de804-63b7-4887-b752-282e70e0f20e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.145428] env[67015]: DEBUG oslo_concurrency.lockutils [None req-fbe08c8e-af44-4d57-8db2-2343af481a69 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Acquiring lock "ef201b80-65b2-4fa1-8150-8b7a3fbea673" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.145746] env[67015]: DEBUG oslo_concurrency.lockutils [None req-fbe08c8e-af44-4d57-8db2-2343af481a69 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Lock "ef201b80-65b2-4fa1-8150-8b7a3fbea673" acquired by
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 810.878018] env[67015]: WARNING oslo_vmware.rw_handles [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 810.878018] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 810.878018] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 810.878018] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 810.878018] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 810.878018] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 810.878018] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 810.878018] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 810.878018] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 810.878018] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 810.878018] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 810.878018] env[67015]: ERROR oslo_vmware.rw_handles [ 810.878018] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/9298a8c6-550d-4b1d-a28b-6bcc25d91c48/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 810.878614] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 810.878614] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Copying Virtual Disk [datastore2] vmware_temp/9298a8c6-550d-4b1d-a28b-6bcc25d91c48/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/9298a8c6-550d-4b1d-a28b-6bcc25d91c48/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 810.878997] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed3deef0-cd0f-4dee-9c85-d279c5cb846c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.888143] env[67015]: DEBUG oslo_vmware.api [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc 
tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Waiting for the task: (returnval){ [ 810.888143] env[67015]: value = "task-3114430" [ 810.888143] env[67015]: _type = "Task" [ 810.888143] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.897012] env[67015]: DEBUG oslo_vmware.api [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Task: {'id': task-3114430, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.399214] env[67015]: DEBUG oslo_vmware.exceptions [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Fault InvalidArgument not matched. {{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 811.399701] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 811.400452] env[67015]: ERROR nova.compute.manager [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 811.400452] env[67015]: Faults: ['InvalidArgument'] [ 811.400452] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Traceback (most recent call last): [ 811.400452] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 811.400452] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] yield resources [ 811.400452] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 811.400452] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] self.driver.spawn(context, instance, image_meta, [ 811.400452] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 811.400452] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 811.400452] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 811.400452] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] self._fetch_image_if_missing(context, vi) [ 811.400452] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 811.401020] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] image_cache(vi, tmp_image_ds_loc) [ 811.401020] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 811.401020] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] vm_util.copy_virtual_disk( [ 811.401020] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 811.401020] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] session._wait_for_task(vmdk_copy_task) [ 811.401020] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 811.401020] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] return self.wait_for_task(task_ref) [ 811.401020] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 811.401020] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] return evt.wait() [ 811.401020] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 811.401020] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] result = hub.switch() [ 811.401020] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 811.401020] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] return self.greenlet.switch() [ 811.401634] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 811.401634] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] self.f(*self.args, **self.kw) [ 811.401634] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 811.401634] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] raise exceptions.translate_fault(task_info.error) [ 811.401634] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 811.401634] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Faults: ['InvalidArgument'] [ 811.401634] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] [ 811.401634] env[67015]: INFO nova.compute.manager [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Terminating instance [ 811.403734] env[67015]: DEBUG 
oslo_concurrency.lockutils [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.403955] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 811.404293] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0301517a-bcff-412d-b73d-f4cd2531cef4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.407312] env[67015]: DEBUG nova.compute.manager [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 811.407593] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 811.408522] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4886cbc-9667-4c8c-8f56-ed425a7fd04e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.412207] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 811.412406] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 811.413423] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f232d712-d65d-4e67-9d60-4bf40efc312b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.418266] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 811.418760] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-80806fc5-6635-44c8-aef6-3a0ea3b24b8b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.421267] env[67015]: DEBUG oslo_vmware.api [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Waiting for the task: (returnval){ [ 811.421267] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52ef25a1-32c0-1d36-facc-5f9d28a96598" [ 811.421267] env[67015]: _type = "Task" [ 811.421267] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.428725] env[67015]: DEBUG oslo_vmware.api [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52ef25a1-32c0-1d36-facc-5f9d28a96598, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.503869] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 811.504058] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 811.504309] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Deleting the datastore file [datastore2] 3eb80b23-a4a3-43e6-9620-86bf1eb344f2 {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 811.504588] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3735587a-b6de-42f0-bd8b-a71299e3b3fe {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.516640] env[67015]: DEBUG oslo_vmware.api [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Waiting for the task: (returnval){ [ 811.516640] env[67015]: value = "task-3114432" [ 811.516640] env[67015]: _type = "Task" [ 811.516640] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.526417] env[67015]: DEBUG oslo_vmware.api [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Task: {'id': task-3114432, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.932132] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 811.932486] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Creating directory with path [datastore2] vmware_temp/054a844b-69fc-4641-94cb-f156136c95d4/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 811.932619] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4303d944-4bcf-41fe-8c2d-2466e8bed717 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.946523] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Created directory with path [datastore2] vmware_temp/054a844b-69fc-4641-94cb-f156136c95d4/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 811.947760] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Fetch image to [datastore2] vmware_temp/054a844b-69fc-4641-94cb-f156136c95d4/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 811.947760] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/054a844b-69fc-4641-94cb-f156136c95d4/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 811.947760] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50178c17-e7b2-436e-a1bc-fa3c26dcf6d7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.954997] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a724c613-5ece-417d-bdde-b3cd976a923d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.967020] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-043d8901-2dfe-4eb7-aa96-2f9c3d246674 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
811.994613] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d17fc80-74b9-4e75-9a81-5b2814af0808 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.001064] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-328a951a-91bf-4ab5-bf20-66b7edc81f8a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.024475] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 812.032484] env[67015]: DEBUG oslo_vmware.api [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Task: {'id': task-3114432, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073588} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.032749] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 812.032920] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 812.033198] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 812.033453] env[67015]: INFO nova.compute.manager [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Took 0.63 seconds to destroy the instance on the hypervisor. 
[ 812.036071] env[67015]: DEBUG nova.compute.claims [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 812.036316] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.036580] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.100649] env[67015]: DEBUG oslo_vmware.rw_handles [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/054a844b-69fc-4641-94cb-f156136c95d4/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 812.167690] env[67015]: DEBUG oslo_vmware.rw_handles [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 812.168087] env[67015]: DEBUG oslo_vmware.rw_handles [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/054a844b-69fc-4641-94cb-f156136c95d4/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 812.537169] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-537f8231-6993-4a52-86c5-6014a3f8158a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.545523] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee9196a-a50d-4d13-8466-20494be83ddb {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.577076] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc00a2a-3e01-4a27-9dad-5d3f01be0744 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.584822] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eee941b-34c0-4d9e-a495-b886c65cf2bf {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.598436] env[67015]: DEBUG nova.compute.provider_tree [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 812.608770] env[67015]: DEBUG nova.scheduler.client.report [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 812.622960] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.586s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.623563] env[67015]: ERROR nova.compute.manager [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 812.623563] env[67015]: Faults: ['InvalidArgument'] [ 812.623563] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Traceback (most recent call last): [ 812.623563] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 812.623563] env[67015]: ERROR nova.compute.manager 
[instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] self.driver.spawn(context, instance, image_meta, [ 812.623563] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 812.623563] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 812.623563] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 812.623563] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] self._fetch_image_if_missing(context, vi) [ 812.623563] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 812.623563] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] image_cache(vi, tmp_image_ds_loc) [ 812.623563] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 812.623931] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] vm_util.copy_virtual_disk( [ 812.623931] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 812.623931] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] session._wait_for_task(vmdk_copy_task) [ 812.623931] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 812.623931] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] return self.wait_for_task(task_ref) [ 812.623931] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 812.623931] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] return evt.wait() [ 812.623931] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 812.623931] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] result = hub.switch() [ 812.623931] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 812.623931] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] return self.greenlet.switch() [ 812.623931] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 812.623931] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] self.f(*self.args, **self.kw) [ 812.624305] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 812.624305] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] raise exceptions.translate_fault(task_info.error) [ 812.624305] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 812.624305] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Faults: ['InvalidArgument'] [ 812.624305] env[67015]: ERROR nova.compute.manager [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] [ 812.624430] env[67015]: DEBUG nova.compute.utils [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 812.626081] env[67015]: DEBUG nova.compute.manager [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Build of instance 3eb80b23-a4a3-43e6-9620-86bf1eb344f2 was re-scheduled: A specified parameter was not correct: fileType [ 812.626081] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 812.626500] env[67015]: DEBUG nova.compute.manager [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 812.626698] env[67015]: DEBUG nova.compute.manager [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 812.626857] env[67015]: DEBUG nova.compute.manager [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 812.627055] env[67015]: DEBUG nova.network.neutron [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 813.227024] env[67015]: DEBUG nova.network.neutron [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.247022] env[67015]: INFO nova.compute.manager [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Took 0.62 seconds to deallocate network for instance. [ 813.369749] env[67015]: INFO nova.scheduler.client.report [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Deleted allocations for instance 3eb80b23-a4a3-43e6-9620-86bf1eb344f2 [ 813.395784] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ca9bcb45-38b6-4bcb-991a-6d8416f739cc tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Lock "3eb80b23-a4a3-43e6-9620-86bf1eb344f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 248.697s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.396985] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "3eb80b23-a4a3-43e6-9620-86bf1eb344f2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 242.589s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.397196] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] During sync_power_state the instance has a pending task (spawning). Skip.
[ 813.397406] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "3eb80b23-a4a3-43e6-9620-86bf1eb344f2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.401196] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3b810683-3fe7-475d-b396-3ad4d7b044b5 tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Lock "3eb80b23-a4a3-43e6-9620-86bf1eb344f2" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 48.394s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.401196] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3b810683-3fe7-475d-b396-3ad4d7b044b5 tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Acquiring lock "3eb80b23-a4a3-43e6-9620-86bf1eb344f2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 813.401196] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3b810683-3fe7-475d-b396-3ad4d7b044b5 tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Lock "3eb80b23-a4a3-43e6-9620-86bf1eb344f2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.401325] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3b810683-3fe7-475d-b396-3ad4d7b044b5 tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Lock "3eb80b23-a4a3-43e6-9620-86bf1eb344f2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.402482] env[67015]: INFO nova.compute.manager [None req-3b810683-3fe7-475d-b396-3ad4d7b044b5 tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Terminating instance [ 813.405176] env[67015]: DEBUG nova.compute.manager [None req-3b810683-3fe7-475d-b396-3ad4d7b044b5 tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Start destroying the instance on the hypervisor.
{{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 813.405430] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-3b810683-3fe7-475d-b396-3ad4d7b044b5 tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 813.406209] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-57644e9c-43ac-4e98-8627-897542089cdf {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.415882] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dfdf791-10be-45b0-a955-41a80165800c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.428234] env[67015]: DEBUG nova.compute.manager [None req-d4eca598-4d95-41ee-bdaa-328b7b9d08c3 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: a634ad37-b9f9-40a0-b2c1-6cd8612ba274] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 813.451897] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-3b810683-3fe7-475d-b396-3ad4d7b044b5 tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3eb80b23-a4a3-43e6-9620-86bf1eb344f2 could not be found. [ 813.452011] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-3b810683-3fe7-475d-b396-3ad4d7b044b5 tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 813.452206] env[67015]: INFO nova.compute.manager [None req-3b810683-3fe7-475d-b396-3ad4d7b044b5 tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Took 0.05 seconds to destroy the instance on the hypervisor. [ 813.453719] env[67015]: DEBUG oslo.service.loopingcall [None req-3b810683-3fe7-475d-b396-3ad4d7b044b5 tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 813.453719] env[67015]: DEBUG nova.compute.manager [-] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 813.453719] env[67015]: DEBUG nova.network.neutron [-] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 813.469681] env[67015]: DEBUG nova.compute.manager [None req-d4eca598-4d95-41ee-bdaa-328b7b9d08c3 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: a634ad37-b9f9-40a0-b2c1-6cd8612ba274] Instance disappeared before build.
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 813.482647] env[67015]: DEBUG nova.network.neutron [-] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.491813] env[67015]: INFO nova.compute.manager [-] [instance: 3eb80b23-a4a3-43e6-9620-86bf1eb344f2] Took 0.04 seconds to deallocate network for instance. [ 813.494127] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d4eca598-4d95-41ee-bdaa-328b7b9d08c3 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Lock "a634ad37-b9f9-40a0-b2c1-6cd8612ba274" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 224.412s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.512087] env[67015]: DEBUG nova.compute.manager [None req-0df10f79-42ae-4c71-aee7-4a6a188fa2a1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 85e5e3fd-942a-4a75-98b5-221d37a51dcf] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 813.571305] env[67015]: DEBUG nova.compute.manager [None req-0df10f79-42ae-4c71-aee7-4a6a188fa2a1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 85e5e3fd-942a-4a75-98b5-221d37a51dcf] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 813.601654] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0df10f79-42ae-4c71-aee7-4a6a188fa2a1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Lock "85e5e3fd-942a-4a75-98b5-221d37a51dcf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 221.609s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.617530] env[67015]: DEBUG nova.compute.manager [None req-7feca8b3-8585-4934-90bc-80ef23464c76 tempest-ServerActionsTestJSON-2043308483 tempest-ServerActionsTestJSON-2043308483-project-member] [instance: b10504c5-2770-45e0-bd3a-1fdc342b1397] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 813.624377] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3b810683-3fe7-475d-b396-3ad4d7b044b5 tempest-ServerDiagnosticsTest-910203082 tempest-ServerDiagnosticsTest-910203082-project-member] Lock "3eb80b23-a4a3-43e6-9620-86bf1eb344f2" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.225s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.645370] env[67015]: DEBUG nova.compute.manager [None req-7feca8b3-8585-4934-90bc-80ef23464c76 tempest-ServerActionsTestJSON-2043308483 tempest-ServerActionsTestJSON-2043308483-project-member] [instance: b10504c5-2770-45e0-bd3a-1fdc342b1397] Instance disappeared before build.
[ 813.645370] env[67015]: DEBUG nova.compute.manager [None req-7feca8b3-8585-4934-90bc-80ef23464c76 tempest-ServerActionsTestJSON-2043308483 tempest-ServerActionsTestJSON-2043308483-project-member] [instance: b10504c5-2770-45e0-bd3a-1fdc342b1397] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 813.668547] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7feca8b3-8585-4934-90bc-80ef23464c76 tempest-ServerActionsTestJSON-2043308483 tempest-ServerActionsTestJSON-2043308483-project-member] Lock "b10504c5-2770-45e0-bd3a-1fdc342b1397" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.436s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 813.680330] env[67015]: DEBUG nova.compute.manager [None req-3f050b95-ef5d-4acf-9592-b3e87abdf3aa tempest-ServersTestJSON-1523931028 tempest-ServersTestJSON-1523931028-project-member] [instance: 4df734f8-537a-462b-991e-472c15afeb61] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 813.709196] env[67015]: DEBUG nova.compute.manager [None req-3f050b95-ef5d-4acf-9592-b3e87abdf3aa tempest-ServersTestJSON-1523931028 tempest-ServersTestJSON-1523931028-project-member] [instance: 4df734f8-537a-462b-991e-472c15afeb61] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 813.740437] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3f050b95-ef5d-4acf-9592-b3e87abdf3aa tempest-ServersTestJSON-1523931028 tempest-ServersTestJSON-1523931028-project-member] Lock "4df734f8-537a-462b-991e-472c15afeb61" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.880s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 813.750665] env[67015]: DEBUG nova.compute.manager [None req-036afbd1-24b3-4e4e-86f3-c27bab5a23be tempest-FloatingIPsAssociationTestJSON-2063730022 tempest-FloatingIPsAssociationTestJSON-2063730022-project-member] [instance: 29c344b0-2ec3-44e6-8d33-fd988c26da7c] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 813.788895] env[67015]: DEBUG nova.compute.manager [None req-036afbd1-24b3-4e4e-86f3-c27bab5a23be tempest-FloatingIPsAssociationTestJSON-2063730022 tempest-FloatingIPsAssociationTestJSON-2063730022-project-member] [instance: 29c344b0-2ec3-44e6-8d33-fd988c26da7c] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 813.813867] env[67015]: DEBUG oslo_concurrency.lockutils [None req-036afbd1-24b3-4e4e-86f3-c27bab5a23be tempest-FloatingIPsAssociationTestJSON-2063730022 tempest-FloatingIPsAssociationTestJSON-2063730022-project-member] Lock "29c344b0-2ec3-44e6-8d33-fd988c26da7c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.904s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 813.823412] env[67015]: DEBUG nova.compute.manager [None req-442784b2-0246-41d8-b804-081596854df1 tempest-ImagesOneServerTestJSON-1640681993 tempest-ImagesOneServerTestJSON-1640681993-project-member] [instance: 8cd6a13f-fae2-43ea-846e-24f4987cf2ea] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 813.857951] env[67015]: DEBUG nova.compute.manager [None req-442784b2-0246-41d8-b804-081596854df1 tempest-ImagesOneServerTestJSON-1640681993 tempest-ImagesOneServerTestJSON-1640681993-project-member] [instance: 8cd6a13f-fae2-43ea-846e-24f4987cf2ea] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 813.886619] env[67015]: DEBUG oslo_concurrency.lockutils [None req-442784b2-0246-41d8-b804-081596854df1 tempest-ImagesOneServerTestJSON-1640681993 tempest-ImagesOneServerTestJSON-1640681993-project-member] Lock "8cd6a13f-fae2-43ea-846e-24f4987cf2ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.029s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 813.904745] env[67015]: DEBUG nova.compute.manager [None req-7f48bfd1-1578-4c08-907c-d9d6a348ed19 tempest-InstanceActionsV221TestJSON-1617409752 tempest-InstanceActionsV221TestJSON-1617409752-project-member] [instance: ba11c5cc-bd2b-4500-a87e-941e64630c36] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 813.930437] env[67015]: DEBUG nova.compute.manager [None req-7f48bfd1-1578-4c08-907c-d9d6a348ed19 tempest-InstanceActionsV221TestJSON-1617409752 tempest-InstanceActionsV221TestJSON-1617409752-project-member] [instance: ba11c5cc-bd2b-4500-a87e-941e64630c36] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 813.956364] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7f48bfd1-1578-4c08-907c-d9d6a348ed19 tempest-InstanceActionsV221TestJSON-1617409752 tempest-InstanceActionsV221TestJSON-1617409752-project-member] Lock "ba11c5cc-bd2b-4500-a87e-941e64630c36" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.081s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 813.975811] env[67015]: DEBUG nova.compute.manager [None req-33527b3d-474b-41e8-9204-545440cb6478 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: fd673125-b2b3-4a7b-a90d-0452b95d5db8] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 814.003815] env[67015]: DEBUG nova.compute.manager [None req-33527b3d-474b-41e8-9204-545440cb6478 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: fd673125-b2b3-4a7b-a90d-0452b95d5db8] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 814.031407] env[67015]: DEBUG oslo_concurrency.lockutils [None req-33527b3d-474b-41e8-9204-545440cb6478 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "fd673125-b2b3-4a7b-a90d-0452b95d5db8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 214.147s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 814.042989] env[67015]: DEBUG nova.compute.manager [None req-8526e15b-1266-41c0-b552-5e7a15fc97dc tempest-VolumesAdminNegativeTest-1180113282 tempest-VolumesAdminNegativeTest-1180113282-project-member] [instance: 76c4c961-0d94-445e-a861-b3880ef96d98] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 814.080246] env[67015]: DEBUG nova.compute.manager [None req-8526e15b-1266-41c0-b552-5e7a15fc97dc tempest-VolumesAdminNegativeTest-1180113282 tempest-VolumesAdminNegativeTest-1180113282-project-member] [instance: 76c4c961-0d94-445e-a861-b3880ef96d98] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 814.110172] env[67015]: DEBUG oslo_concurrency.lockutils [None req-8526e15b-1266-41c0-b552-5e7a15fc97dc tempest-VolumesAdminNegativeTest-1180113282 tempest-VolumesAdminNegativeTest-1180113282-project-member] Lock "76c4c961-0d94-445e-a861-b3880ef96d98" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.017s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 814.125033] env[67015]: DEBUG nova.compute.manager [None req-82762d80-84f9-4661-a5db-09f3992bd856 tempest-SecurityGroupsTestJSON-611676208 tempest-SecurityGroupsTestJSON-611676208-project-member] [instance: 32cb4672-8ebe-472b-b4ff-48bffad679e6] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 814.154275] env[67015]: DEBUG nova.compute.manager [None req-82762d80-84f9-4661-a5db-09f3992bd856 tempest-SecurityGroupsTestJSON-611676208 tempest-SecurityGroupsTestJSON-611676208-project-member] [instance: 32cb4672-8ebe-472b-b4ff-48bffad679e6] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 814.185329] env[67015]: DEBUG oslo_concurrency.lockutils [None req-82762d80-84f9-4661-a5db-09f3992bd856 tempest-SecurityGroupsTestJSON-611676208 tempest-SecurityGroupsTestJSON-611676208-project-member] Lock "32cb4672-8ebe-472b-b4ff-48bffad679e6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.568s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 814.195651] env[67015]: DEBUG nova.compute.manager [None req-d0b75a1b-9828-4230-bdd4-7b46217a9b6a tempest-ServerRescueTestJSONUnderV235-1226696864 tempest-ServerRescueTestJSONUnderV235-1226696864-project-member] [instance: a35ac03a-2a8a-4c79-ae3f-5afc1f563964] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 814.234388] env[67015]: DEBUG nova.compute.manager [None req-d0b75a1b-9828-4230-bdd4-7b46217a9b6a tempest-ServerRescueTestJSONUnderV235-1226696864 tempest-ServerRescueTestJSONUnderV235-1226696864-project-member] [instance: a35ac03a-2a8a-4c79-ae3f-5afc1f563964] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 814.260982] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d0b75a1b-9828-4230-bdd4-7b46217a9b6a tempest-ServerRescueTestJSONUnderV235-1226696864 tempest-ServerRescueTestJSONUnderV235-1226696864-project-member] Lock "a35ac03a-2a8a-4c79-ae3f-5afc1f563964" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.443s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 814.274172] env[67015]: DEBUG nova.compute.manager [None req-dcc5f623-064e-4169-b20d-97a88a7a6e5d tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: bcef5f6b-c7b9-413c-b198-b858444a12da] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 814.304553] env[67015]: DEBUG nova.compute.manager [None req-dcc5f623-064e-4169-b20d-97a88a7a6e5d tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: bcef5f6b-c7b9-413c-b198-b858444a12da] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 814.334468] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dcc5f623-064e-4169-b20d-97a88a7a6e5d tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Lock "bcef5f6b-c7b9-413c-b198-b858444a12da" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.236s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 814.345796] env[67015]: DEBUG nova.compute.manager [None req-7f297844-3a5e-4aa9-96a1-c4af40267913 tempest-ServersTestBootFromVolume-375715076 tempest-ServersTestBootFromVolume-375715076-project-member] [instance: 1b24ae9d-f8bc-4c11-9b07-b4a4e435e269] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 814.379331] env[67015]: DEBUG nova.compute.manager [None req-7f297844-3a5e-4aa9-96a1-c4af40267913 tempest-ServersTestBootFromVolume-375715076 tempest-ServersTestBootFromVolume-375715076-project-member] [instance: 1b24ae9d-f8bc-4c11-9b07-b4a4e435e269] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 814.408019] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7f297844-3a5e-4aa9-96a1-c4af40267913 tempest-ServersTestBootFromVolume-375715076 tempest-ServersTestBootFromVolume-375715076-project-member] Lock "1b24ae9d-f8bc-4c11-9b07-b4a4e435e269" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.965s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 814.420102] env[67015]: DEBUG nova.compute.manager [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 814.496878] env[67015]: DEBUG oslo_concurrency.lockutils [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 814.497149] env[67015]: DEBUG oslo_concurrency.lockutils [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 814.498744] env[67015]: INFO nova.compute.claims [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 814.950446] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf493094-8411-4357-b788-e307fcff0c14 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 814.964016] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d2a428-56ca-44d3-878d-9f81f4f8d332 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 814.996346] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed2e824c-9e50-4e6d-bfc9-344f3a995670 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 815.004308] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e878792-9b03-475d-9d97-136340882da0 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 815.017618] env[67015]: DEBUG nova.compute.provider_tree [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 815.029440] env[67015]: DEBUG nova.scheduler.client.report [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 815.050780] env[67015]: DEBUG oslo_concurrency.lockutils [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.553s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 815.051441] env[67015]: DEBUG nova.compute.manager [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 815.101222] env[67015]: DEBUG nova.compute.utils [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 815.101222] env[67015]: DEBUG nova.compute.manager [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 815.101222] env[67015]: DEBUG nova.network.neutron [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 815.115465] env[67015]: DEBUG nova.compute.manager [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
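The inventory dict logged at 815.029440 is what the resource tracker reports to Placement for this node. Schedulable capacity per resource class follows Placement's documented formula, capacity = (total - reserved) * allocation_ratio, so this node exposes 192 VCPUs, 196078 MB of RAM, and 400 GB of disk. A quick recomputation with the values copied from the log line:

```python
# Recomputing schedulable capacity from the inventory record above, using
# Placement's documented formula: (total - reserved) * allocation_ratio.
# min_unit/max_unit/step_size constrain individual allocations instead.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g} schedulable units")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```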
[ 815.187784] env[67015]: DEBUG nova.compute.manager [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Start spawning the instance on the hypervisor. {{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
[ 815.205252] env[67015]: DEBUG nova.policy [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '10d731194482403b86bc07105f1b4605', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '175434d7671641f8bed84e497e701e4f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}}
[ 815.216194] env[67015]: DEBUG nova.virt.hardware [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 815.216434] env[67015]: DEBUG nova.virt.hardware [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 815.216592] env[67015]: DEBUG nova.virt.hardware [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 815.216773] env[67015]: DEBUG nova.virt.hardware [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 815.216921] env[67015]: DEBUG nova.virt.hardware [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 815.217123] env[67015]: DEBUG nova.virt.hardware [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 815.217360] env[67015]: DEBUG nova.virt.hardware [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 815.217524] env[67015]: DEBUG nova.virt.hardware [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 815.217690] env[67015]: DEBUG nova.virt.hardware [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 815.217852] env[67015]: DEBUG nova.virt.hardware [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 815.218042] env[67015]: DEBUG nova.virt.hardware [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 815.221605] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e1971e3-5aac-4ef6-9f2e-ebbeabb37cf5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 815.227731] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3928590-00e2-491a-b281-50b6c421a4ca {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 816.173450] env[67015]: DEBUG nova.network.neutron [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Successfully created port: 25990e3e-ade8-4239-bdde-9a4746ed04e6 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 816.179774] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9bc8a9d4-548d-4bb8-8faa-8f424aae3840 tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Acquiring lock "2546fa27-1d27-4e23-94f0-e6bdb4b42179" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
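The `Build topologies for 1 vcpu(s) 1:1:1` / `Got 1 possible topologies` lines above enumerate sockets:cores:threads combinations whose product equals the flavor's vCPU count, bounded by the (here effectively unlimited) 65536 limits. A toy re-derivation of that enumeration; it mirrors the idea in nova.virt.hardware, not its exact code:

```python
# Toy version of the "possible topologies" computation logged above:
# every (sockets, cores, threads) triple whose product is the vCPU count
# and which stays within the flavor/image limits is a candidate.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    return [
        (s, c, t)
        for s in range(1, min(vcpus, max_sockets) + 1)
        for c in range(1, min(vcpus, max_cores) + 1)
        for t in range(1, min(vcpus, max_threads) + 1)
        if s * c * t == vcpus
    ]

print(possible_topologies(1))  # [(1, 1, 1)] -- the single topology in the log
print(possible_topologies(4))  # six candidates, e.g. (2, 2, 1) and (4, 1, 1)
```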
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.045742] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f1786543-32b2-42f0-8a9e-e70a34245ac1 tempest-ServerShowV257Test-1044046039 tempest-ServerShowV257Test-1044046039-project-member] Acquiring lock "1dc453f0-0983-428f-a186-f61248fe74c3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.046240] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f1786543-32b2-42f0-8a9e-e70a34245ac1 tempest-ServerShowV257Test-1044046039 tempest-ServerShowV257Test-1044046039-project-member] Lock "1dc453f0-0983-428f-a186-f61248fe74c3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.364017] env[67015]: DEBUG nova.network.neutron [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Successfully updated port: 25990e3e-ade8-4239-bdde-9a4746ed04e6 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 817.380156] env[67015]: DEBUG oslo_concurrency.lockutils [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Acquiring lock "refresh_cache-5c77964f-e902-489a-86c3-9c9d4dd304d3" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 817.381814] env[67015]: DEBUG oslo_concurrency.lockutils [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Acquired lock "refresh_cache-5c77964f-e902-489a-86c3-9c9d4dd304d3" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.381814] env[67015]: DEBUG nova.network.neutron [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 817.474014] env[67015]: DEBUG nova.network.neutron [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Instance cache missing network info. 
{{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 817.515634] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 817.515634] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Cleaning up deleted instances {{(pid=67015) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 817.532309] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] There are 0 instances to clean {{(pid=67015) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 817.532309] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 817.532309] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Cleaning up deleted instances with incomplete migration {{(pid=67015) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 817.543735] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 818.022902] env[67015]: DEBUG nova.network.neutron [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Updating instance_info_cache with network_info: [{"id": "25990e3e-ade8-4239-bdde-9a4746ed04e6", "address": "fa:16:3e:b2:89:bf", "network": {"id": "775f1b11-eb43-4e50-9bdc-f7ab8c9cf217", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1014204807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "175434d7671641f8bed84e497e701e4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06cc7c49-c46c-4c1e-bf51-77e9ea802c40", "external-id": "nsx-vlan-transportzone-450", "segmentation_id": 450, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25990e3e-ad", "ovs_interfaceid": "25990e3e-ade8-4239-bdde-9a4746ed04e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.034296] env[67015]: DEBUG oslo_concurrency.lockutils [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Releasing lock 
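The `instance_info_cache` payload logged above is plain JSON. A small sketch of reading the fields that matter to the VMware VIF plumbing later in the log (port id, MAC, fixed IP, NSX logical-switch id); the payload here is a trimmed copy of the logged one:

```python
import json

# Trimmed copy of the network_info entry logged above; only the keys used
# below are kept. The full record also carries subnets metadata, routes,
# MTU, the OVS interface id, and so on.
network_info = json.loads('''
[{"id": "25990e3e-ade8-4239-bdde-9a4746ed04e6",
  "address": "fa:16:3e:b2:89:bf",
  "network": {"id": "775f1b11-eb43-4e50-9bdc-f7ab8c9cf217",
              "bridge": "br-int",
              "subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.8",
                                    "type": "fixed"}]}]},
  "details": {"nsx-logical-switch-id": "06cc7c49-c46c-4c1e-bf51-77e9ea802c40"},
  "devname": "tap25990e3e-ad",
  "vnic_type": "normal"}]
''')

for vif in network_info:
    fixed_ip = vif["network"]["subnets"][0]["ips"][0]["address"]
    print(vif["id"], vif["address"], fixed_ip,
          vif["details"]["nsx-logical-switch-id"])
```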
"refresh_cache-5c77964f-e902-489a-86c3-9c9d4dd304d3" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 818.034671] env[67015]: DEBUG nova.compute.manager [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Instance network_info: |[{"id": "25990e3e-ade8-4239-bdde-9a4746ed04e6", "address": "fa:16:3e:b2:89:bf", "network": {"id": "775f1b11-eb43-4e50-9bdc-f7ab8c9cf217", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1014204807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "175434d7671641f8bed84e497e701e4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06cc7c49-c46c-4c1e-bf51-77e9ea802c40", "external-id": "nsx-vlan-transportzone-450", "segmentation_id": 450, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25990e3e-ad", "ovs_interfaceid": "25990e3e-ade8-4239-bdde-9a4746ed04e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 818.035401] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:89:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '06cc7c49-c46c-4c1e-bf51-77e9ea802c40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '25990e3e-ade8-4239-bdde-9a4746ed04e6', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 818.043183] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Creating folder: Project (175434d7671641f8bed84e497e701e4f). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 818.043736] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-10b23f3e-34d0-4be9-9cf8-2965a4b7ddb4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.054459] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Created folder: Project (175434d7671641f8bed84e497e701e4f) in parent group-v623108. 
[ 818.054718] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Creating folder: Instances. Parent ref: group-v623159. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 818.054992] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eab89fd7-ef5e-4261-b3c1-1df1f7274f2a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.064553] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Created folder: Instances in parent group-v623159. [ 818.064834] env[67015]: DEBUG oslo.service.loopingcall [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 818.065033] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 818.065283] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7553d466-8f2b-4c52-a3d3-c3d59ed859ef {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.086326] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 818.086326] env[67015]: value = "task-3114435" [ 818.086326] env[67015]: _type = "Task" [ 818.086326] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.093916] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114435, 'name': CreateVM_Task} progress is 0%. 
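The `Waiting for the task` / `progress is 0%` / `completed successfully` sequence around here is oslo.vmware's task polling: the session keeps re-reading the vSphere task state until it reaches a terminal state. A hedged sketch of that loop in plain Python; `query_task` is a stand-in callable, not an oslo.vmware API:

```python
import time

# Stand-in for the wait_for_task/_poll_task pattern in the log. query_task
# is a placeholder that must return a dict like {'state': ..., ...}; the
# real code reads the vSphere TaskInfo object instead.
def wait_for_task(query_task, poll_interval=0.5):
    while True:
        info = query_task()
        if info["state"] == "success":
            return info                      # e.g. includes duration_secs
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        time.sleep(poll_interval)            # the "progress is N%" polls

# Fake two-poll task mirroring task-3114435 above: one running poll, then done.
states = iter([{"state": "running", "progress": 0},
               {"state": "success", "duration_secs": 0.301363}])
print(wait_for_task(lambda: next(states), poll_interval=0.01))
```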
[ 818.093916] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114435, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 818.183206] env[67015]: DEBUG nova.compute.manager [req-c6065427-994c-4b7a-a50d-76ef98771236 req-3cb0c4ea-efe1-4b7a-8d3f-5f5853db4873 service nova] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Received event network-vif-plugged-25990e3e-ade8-4239-bdde-9a4746ed04e6 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 818.183440] env[67015]: DEBUG oslo_concurrency.lockutils [req-c6065427-994c-4b7a-a50d-76ef98771236 req-3cb0c4ea-efe1-4b7a-8d3f-5f5853db4873 service nova] Acquiring lock "5c77964f-e902-489a-86c3-9c9d4dd304d3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 818.183648] env[67015]: DEBUG oslo_concurrency.lockutils [req-c6065427-994c-4b7a-a50d-76ef98771236 req-3cb0c4ea-efe1-4b7a-8d3f-5f5853db4873 service nova] Lock "5c77964f-e902-489a-86c3-9c9d4dd304d3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 818.183820] env[67015]: DEBUG oslo_concurrency.lockutils [req-c6065427-994c-4b7a-a50d-76ef98771236 req-3cb0c4ea-efe1-4b7a-8d3f-5f5853db4873 service nova] Lock "5c77964f-e902-489a-86c3-9c9d4dd304d3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 818.183990] env[67015]: DEBUG nova.compute.manager [req-c6065427-994c-4b7a-a50d-76ef98771236 req-3cb0c4ea-efe1-4b7a-8d3f-5f5853db4873 service nova] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] No waiting events found dispatching network-vif-plugged-25990e3e-ade8-4239-bdde-9a4746ed04e6 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 818.184487] env[67015]: WARNING nova.compute.manager [req-c6065427-994c-4b7a-a50d-76ef98771236 req-3cb0c4ea-efe1-4b7a-8d3f-5f5853db4873 service nova] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Received unexpected event network-vif-plugged-25990e3e-ade8-4239-bdde-9a4746ed04e6 for instance with vm_state building and task_state spawning.
[ 818.397395] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f4d8d90a-600e-4b03-af5b-4b0a128f3867 tempest-SecurityGroupsTestJSON-611676208 tempest-SecurityGroupsTestJSON-611676208-project-member] Acquiring lock "0371af11-3654-4009-8e87-dc2c516133be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 818.397395] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f4d8d90a-600e-4b03-af5b-4b0a128f3867 tempest-SecurityGroupsTestJSON-611676208 tempest-SecurityGroupsTestJSON-611676208-project-member] Lock "0371af11-3654-4009-8e87-dc2c516133be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 818.597271] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114435, 'name': CreateVM_Task, 'duration_secs': 0.301363} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 818.597464] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 818.598508] env[67015]: DEBUG oslo_concurrency.lockutils [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 818.598508] env[67015]: DEBUG oslo_concurrency.lockutils [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 818.598636] env[67015]: DEBUG oslo_concurrency.lockutils [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 818.598847] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f11f48a-77c9-4c31-82db-5cf82862f363 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 818.603285] env[67015]: DEBUG oslo_vmware.api [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Waiting for the task: (returnval){
[ 818.603285] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52a48cc6-b287-33f9-1e3a-d40648a556ee"
[ 818.603285] env[67015]: _type = "Task"
[ 818.603285] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 818.611705] env[67015]: DEBUG oslo_vmware.api [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52a48cc6-b287-33f9-1e3a-d40648a556ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 819.113123] env[67015]: DEBUG oslo_concurrency.lockutils [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 819.113403] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 819.113620] env[67015]: DEBUG oslo_concurrency.lockutils [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 819.551468] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 820.247208] env[67015]: DEBUG nova.compute.manager [req-8d2b8d70-054e-4a85-98c1-ce01df424d11 req-4a7eee2f-1c2b-49b7-82a2-f6c36c9e2b38 service nova] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Received event network-changed-25990e3e-ade8-4239-bdde-9a4746ed04e6 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 820.247486] env[67015]: DEBUG nova.compute.manager [req-8d2b8d70-054e-4a85-98c1-ce01df424d11 req-4a7eee2f-1c2b-49b7-82a2-f6c36c9e2b38 service nova] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Refreshing instance network info cache due to event network-changed-25990e3e-ade8-4239-bdde-9a4746ed04e6. {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 820.247613] env[67015]: DEBUG oslo_concurrency.lockutils [req-8d2b8d70-054e-4a85-98c1-ce01df424d11 req-4a7eee2f-1c2b-49b7-82a2-f6c36c9e2b38 service nova] Acquiring lock "refresh_cache-5c77964f-e902-489a-86c3-9c9d4dd304d3" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 820.247769] env[67015]: DEBUG oslo_concurrency.lockutils [req-8d2b8d70-054e-4a85-98c1-ce01df424d11 req-4a7eee2f-1c2b-49b7-82a2-f6c36c9e2b38 service nova] Acquired lock "refresh_cache-5c77964f-e902-489a-86c3-9c9d4dd304d3" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 820.247907] env[67015]: DEBUG nova.network.neutron [req-8d2b8d70-054e-4a85-98c1-ce01df424d11 req-4a7eee2f-1c2b-49b7-82a2-f6c36c9e2b38 service nova] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Refreshing network info cache for port 25990e3e-ade8-4239-bdde-9a4746ed04e6 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 820.509120] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 820.782796] env[67015]: DEBUG nova.network.neutron [req-8d2b8d70-054e-4a85-98c1-ce01df424d11 req-4a7eee2f-1c2b-49b7-82a2-f6c36c9e2b38 service nova] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Updated VIF entry in instance network info cache for port 25990e3e-ade8-4239-bdde-9a4746ed04e6. {{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 820.782796] env[67015]: DEBUG nova.network.neutron [req-8d2b8d70-054e-4a85-98c1-ce01df424d11 req-4a7eee2f-1c2b-49b7-82a2-f6c36c9e2b38 service nova] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Updating instance_info_cache with network_info: [{"id": "25990e3e-ade8-4239-bdde-9a4746ed04e6", "address": "fa:16:3e:b2:89:bf", "network": {"id": "775f1b11-eb43-4e50-9bdc-f7ab8c9cf217", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1014204807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "175434d7671641f8bed84e497e701e4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06cc7c49-c46c-4c1e-bf51-77e9ea802c40", "external-id": "nsx-vlan-transportzone-450", "segmentation_id": 450, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25990e3e-ad", "ovs_interfaceid": "25990e3e-ade8-4239-bdde-9a4746ed04e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 820.792491] env[67015]: DEBUG oslo_concurrency.lockutils [req-8d2b8d70-054e-4a85-98c1-ce01df424d11 req-4a7eee2f-1c2b-49b7-82a2-f6c36c9e2b38 service nova] Releasing lock "refresh_cache-5c77964f-e902-489a-86c3-9c9d4dd304d3" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 821.070615] env[67015]: DEBUG oslo_concurrency.lockutils [None req-2b572639-b712-416f-8c76-fb10065536b2 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Acquiring lock "5c77964f-e902-489a-86c3-9c9d4dd304d3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 821.516637] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 821.516637] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 821.516637] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 821.516637] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 821.516637] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}}
[ 822.513862] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 822.514172] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}}
[ 822.514513] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 822.540615] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 822.541668] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 822.541974] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 822.542266] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 822.542527] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 822.542787] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 822.543045] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 822.543243] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 822.543413] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 822.543664] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
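The `Running periodic task ComputeManager._...` lines interleaved through this stretch are driven by oslo.service's periodic-task machinery: methods decorated as periodic tasks are collected on a manager class and executed on each `run_periodic_tasks()` pass. A hedged sketch of that wiring; the manager class and task body are illustrative, not ComputeManager's code:

```python
# Sketch of how the periodic tasks in the log are declared with
# oslo.service. The bare decorator runs the task on every pass; a
# spacing=N argument would run it at most every N seconds instead.
from oslo_config import cfg
from oslo_service import periodic_task

class Manager(periodic_task.PeriodicTasks):
    @periodic_task.periodic_task
    def _heal_instance_info_cache(self, context):
        # The real task skips instances still Building, as logged above.
        print("running periodic task _heal_instance_info_cache")

mgr = Manager(cfg.CONF)
mgr.run_periodic_tasks(context=None)
```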
[ 822.544271] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}}
[ 822.544830] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 822.545463] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 822.559926] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 822.560291] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 822.560354] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 822.560515] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 822.562231] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7972cfde-17f6-404a-868f-dae957745242 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 822.573605] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6211e2d-0321-4ec0-894d-7d06e69f625c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 822.590074] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8777ca31-0efa-4335-b690-61cefaa5fb46 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 822.597642] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d5ec760-91c6-4f35-b39a-2fb2b5b497d3 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 822.629850] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181013MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 822.630016] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 822.630219] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 822.815215] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 822.815368] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance e0ac77e1-fb77-4b97-bacc-838cc3e16bbc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 822.815491] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 13e28171-8074-4660-91cf-f6d569414fc6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 822.815608] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 98ad3e5c-065d-4561-890c-46d5ca0a8f7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 822.815722] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 781b688b-ec99-4423-99b2-2502c6e8a75d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 822.815835] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 0734f630-dea5-4ee0-b890-dd50f3e8b178 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 822.815945] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 822.816066] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 66fa7689-aea7-4b88-b63c-0754f5e99d51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 822.816178] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 3cbfca3b-863a-40d1-81ab-63794b8de97e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 822.816299] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 5c77964f-e902-489a-86c3-9c9d4dd304d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 822.834707] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 76b9817f-5571-48f6-8144-08d18f635750 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 822.849080] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance e0eca599-b9a0-40a5-968d-21ac240f815c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 822.868978] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 573d1e82-a96d-47e9-89b0-efd69306ed59 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 822.887987] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 63420a94-ef64-407a-a032-61a619907b56 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}.
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 822.905689] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 96feb18e-14ee-40cf-bd5d-89a4e773c797 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 822.921586] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 30f91210-0318-4912-808b-843c2cd04ea1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 822.937620] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 843278e1-7d76-4f50-8170-9e335d29326e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 822.947537] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 4a1f7297-67b0-4c57-8c11-101877c27e48 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 822.961734] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 0182f659-5d01-4d6f-8242-aaec4efae151 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 822.973323] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 00412418-9727-4ed0-b4ff-92981ddab7ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 822.992554] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6a333c84-7e40-4ba0-b60b-a5972720e306 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 823.007017] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 5f04238c-701e-4ea3-9dde-769ec26a4462 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 823.015649] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 22f73210-0a16-4aa6-bc1d-d6625a6e4243 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 823.026284] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 09674d48-8f73-40f7-8ff3-3d4198a053fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 823.036875] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance db3de804-63b7-4887-b752-282e70e0f20e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 823.050681] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance ef201b80-65b2-4fa1-8150-8b7a3fbea673 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 823.062708] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 2546fa27-1d27-4e23-94f0-e6bdb4b42179 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 823.074863] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 1dc453f0-0983-428f-a186-f61248fe74c3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 823.090024] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 0371af11-3654-4009-8e87-dc2c516133be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 823.090024] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 823.090024] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 823.110381] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Refreshing inventories for resource provider 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 823.131197] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Updating ProviderTree inventory for provider 82311841-8ff3-4f49-9053-67c5a45ef771 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 823.131197] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Updating inventory in ProviderTree for provider 82311841-8ff3-4f49-9053-67c5a45ef771 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 823.146993] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Refreshing aggregate associations for resource provider 82311841-8ff3-4f49-9053-67c5a45ef771, aggregates: None {{(pid=67015) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 823.187946] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Refreshing trait associations for resource provider 82311841-8ff3-4f49-9053-67c5a45ef771, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=67015) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}}
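The two inventory records above carry the numbers Placement actually schedules against for provider 82311841-8ff3-4f49-9053-67c5a45ef771. A small self-contained sketch of how reserved capacity and allocation ratios turn the physical figures from the "Final resource view" record into schedulable capacity; the values are copied from the log, but the helper is illustrative, not Nova or Placement code:

    # Values copied from the inventory record above; the helper is a sketch,
    # not Nova or Placement code.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'max_unit': 16,
                 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530,
                      'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'max_unit': 135,
                    'allocation_ratio': 1.0},
    }

    def schedulable(rc):
        # Capacity Placement can allocate from a resource class:
        # (total - reserved) * allocation_ratio; max_unit additionally caps
        # any single allocation.
        inv = inventory[rc]
        return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])

    print({rc: schedulable(rc) for rc in inventory})
    # {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}

That 4.0 VCPU allocation_ratio is why a node reporting 48 physical vCPUs carries the 10 allocated vCPUs above with ample headroom: Placement sees 192 schedulable VCPU units, with any single allocation capped at max_unit=16.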
[ 823.674614] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31389204-7ee9-4082-b628-c4eab451a9a6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.682356] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be583f6-0192-4e30-a5ec-a9b9445e3cc9 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.713016] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d9219b-4a5e-4884-b1a0-b3e25847047d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.723612] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3144dd3a-26b8-47ac-a038-545f69472516 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.737130] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 823.748777] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 823.767960] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 823.768203] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.138s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.395963] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6c4aaec3-76e8-404d-a9ba-cf316de5d6be tempest-ServersTestMultiNic-1356030888 tempest-ServersTestMultiNic-1356030888-project-member] Acquiring lock "13a015ac-f68b-421c-a397-c3f7d71531fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.396315] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6c4aaec3-76e8-404d-a9ba-cf316de5d6be tempest-ServersTestMultiNic-1356030888
tempest-ServersTestMultiNic-1356030888-project-member] Lock "13a015ac-f68b-421c-a397-c3f7d71531fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.936483] env[67015]: DEBUG oslo_concurrency.lockutils [None req-16dad952-1ea6-4706-bfd1-672487196b9d tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Acquiring lock "de8e3930-5fdc-49ca-8bbb-46b34dc32e8f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.936796] env[67015]: DEBUG oslo_concurrency.lockutils [None req-16dad952-1ea6-4706-bfd1-672487196b9d tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Lock "de8e3930-5fdc-49ca-8bbb-46b34dc32e8f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.012595] env[67015]: DEBUG oslo_concurrency.lockutils [None req-afec58e4-964d-415d-95d0-328fda8496b4 tempest-AttachVolumeNegativeTest-173917919 tempest-AttachVolumeNegativeTest-173917919-project-member] Acquiring lock "fe41695e-0aee-45a6-b66b-a7185e45fc4a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.012953] env[67015]: DEBUG oslo_concurrency.lockutils [None req-afec58e4-964d-415d-95d0-328fda8496b4 tempest-AttachVolumeNegativeTest-173917919 tempest-AttachVolumeNegativeTest-173917919-project-member] Lock "fe41695e-0aee-45a6-b66b-a7185e45fc4a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.256529] env[67015]: WARNING oslo_vmware.rw_handles [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 859.256529] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 859.256529] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 859.256529] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 859.256529] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 859.256529] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 859.256529] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 859.256529] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 859.256529] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 859.256529] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed 
connection without" [ 859.256529] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 859.256529] env[67015]: ERROR oslo_vmware.rw_handles [ 859.257286] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/054a844b-69fc-4641-94cb-f156136c95d4/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 859.258854] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 859.259117] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Copying Virtual Disk [datastore2] vmware_temp/054a844b-69fc-4641-94cb-f156136c95d4/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/054a844b-69fc-4641-94cb-f156136c95d4/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 859.259406] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-85a51c01-1cdb-4d8a-9b6d-c8d4633b5c19 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.267260] env[67015]: DEBUG oslo_vmware.api [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Waiting for the task: (returnval){ [ 859.267260] env[67015]: value = "task-3114436" [ 859.267260] env[67015]: _type = "Task" [ 859.267260] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.275692] env[67015]: DEBUG oslo_vmware.api [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Task: {'id': task-3114436, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.777864] env[67015]: DEBUG oslo_vmware.exceptions [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Fault InvalidArgument not matched. 
{{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 859.778560] env[67015]: DEBUG oslo_concurrency.lockutils [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.778785] env[67015]: ERROR nova.compute.manager [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 859.778785] env[67015]: Faults: ['InvalidArgument'] [ 859.778785] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Traceback (most recent call last): [ 859.778785] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 859.778785] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] yield resources [ 859.778785] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 859.778785] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] self.driver.spawn(context, instance, image_meta, [ 859.778785] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 859.778785] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] self._vmops.spawn(context, instance, image_meta, injected_files, [ 859.778785] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 859.778785] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] self._fetch_image_if_missing(context, vi) [ 859.778785] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 859.779208] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] image_cache(vi, tmp_image_ds_loc) [ 859.779208] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 859.779208] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] vm_util.copy_virtual_disk( [ 859.779208] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 859.779208] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] session._wait_for_task(vmdk_copy_task) [ 859.779208] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] 
File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 859.779208] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] return self.wait_for_task(task_ref) [ 859.779208] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 859.779208] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] return evt.wait() [ 859.779208] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 859.779208] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] result = hub.switch() [ 859.779208] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 859.779208] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] return self.greenlet.switch() [ 859.779671] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 859.779671] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] self.f(*self.args, **self.kw) [ 859.779671] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 859.779671] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] raise exceptions.translate_fault(task_info.error) [ 859.779671] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 859.779671] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Faults: ['InvalidArgument'] [ 859.779671] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] [ 859.779671] env[67015]: INFO nova.compute.manager [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Terminating instance [ 859.781141] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.781345] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 859.781995] env[67015]: DEBUG nova.compute.manager [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 
tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 859.782331] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 859.782584] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-89004d17-35a0-4c94-b19d-3d99d6ecb082 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.784819] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad7ece49-8426-4a46-8812-12979194cf2e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.791219] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 859.791409] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1c9663af-fc86-475e-85e0-8f356fa1bead {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.793542] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 859.793713] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 859.794660] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-098407d0-85f9-4ae3-af71-75919ebf1e15 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.798997] env[67015]: DEBUG oslo_vmware.api [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Waiting for the task: (returnval){ [ 859.798997] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52f3130c-0db3-490b-6826-57dbbea449b3" [ 859.798997] env[67015]: _type = "Task" [ 859.798997] env[67015]: } to complete. 
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.805937] env[67015]: DEBUG oslo_vmware.api [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52f3130c-0db3-490b-6826-57dbbea449b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.859705] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 859.860651] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 859.860651] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Deleting the datastore file [datastore2] 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93 {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 859.860651] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f5fbdd66-1f20-4d06-b9bc-7964c453704b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.867936] env[67015]: DEBUG oslo_vmware.api [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Waiting for the task: (returnval){ [ 859.867936] env[67015]: value = "task-3114438" [ 859.867936] env[67015]: _type = "Task" [ 859.867936] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.876183] env[67015]: DEBUG oslo_vmware.api [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Task: {'id': task-3114438, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.320632] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 860.321024] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Creating directory with path [datastore2] vmware_temp/73e0a291-76b7-4e71-a65e-3cda509b2221/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 860.322704] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e76d1db0-6402-4012-b587-f33b94244d90 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.341384] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Created directory with path [datastore2] vmware_temp/73e0a291-76b7-4e71-a65e-3cda509b2221/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 860.341599] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Fetch image to [datastore2] vmware_temp/73e0a291-76b7-4e71-a65e-3cda509b2221/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 860.341599] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/73e0a291-76b7-4e71-a65e-3cda509b2221/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 860.342382] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87183d8a-0cec-4d13-b27b-31c988173769 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.353619] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-542ba1f3-249e-4290-864e-0479a2d4dfa5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.378221] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ce0d9ae-5dbe-4662-9b1d-cab6c183ac75 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.386236] env[67015]: DEBUG oslo_vmware.api [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 
tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Task: {'id': task-3114438, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075881} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.410035] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 860.410238] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 860.410408] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 860.410580] env[67015]: INFO nova.compute.manager [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Took 0.63 seconds to destroy the instance on the hypervisor. 
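The teardown above and the fresh download that follows for instance e0ac77e1-fb77-4b97-bacc-838cc3e16bbc both revolve around the same image-cache path: the Glance image is streamed to a tmp-sparse.vmdk under vmware_temp through an HTTP write handle, then CopyVirtualDisk_Task is meant to move it into devstack-image-cache_base, and it is that copy step which vCenter rejected with "A specified parameter was not correct: fileType". A minimal runnable sketch of the flow; the three helpers are stubs standing in for the Nova/oslo.vmware operations named in the comments, not real APIs:

    import uuid

    # Stubs standing in for the operations named in the log; not real APIs.
    def datastore_file_exists(path):                  # SearchDatastore_Task
        return False

    def http_download_to_datastore(image_id, path):   # oslo_vmware rw_handles
        print(f'downloading {image_id} -> {path}')

    def copy_virtual_disk(src, dst):                  # CopyVirtualDisk_Task; the
        print(f'copying {src} -> {dst}')              # step that faulted with
                                                      # InvalidArgument: fileType

    def fetch_image_if_missing(image_id, datastore='datastore2'):
        cached = (f'[{datastore}] devstack-image-cache_base/'
                  f'{image_id}/{image_id}.vmdk')
        if not datastore_file_exists(cached):
            tmp = (f'[{datastore}] vmware_temp/{uuid.uuid4()}/'
                   f'{image_id}/tmp-sparse.vmdk')
            http_download_to_datastore(image_id, tmp)
            copy_virtual_disk(tmp, cached)
        return cached

    fetch_image_if_missing('8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982')

When the copy task faults, Nova aborts the resource claim and hands the build back to the scheduler, which is exactly the sequence the surrounding records show.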
[ 860.412810] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2edb025e-12e7-4cca-a597-2e6f65b3b59d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.415457] env[67015]: DEBUG nova.compute.claims [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 860.415636] env[67015]: DEBUG oslo_concurrency.lockutils [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.415840] env[67015]: DEBUG oslo_concurrency.lockutils [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.421558] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7b2d391d-6601-44e6-945e-dadd46dead63 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.445477] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 860.507700] env[67015]: DEBUG oslo_vmware.rw_handles [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/73e0a291-76b7-4e71-a65e-3cda509b2221/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 860.582830] env[67015]: DEBUG oslo_vmware.rw_handles [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Completed reading data from the image iterator. 
{{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 860.582950] env[67015]: DEBUG oslo_vmware.rw_handles [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/73e0a291-76b7-4e71-a65e-3cda509b2221/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 860.896730] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-376a7407-fc54-4cd0-93b7-4660b7ef8b78 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.905151] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a8460c-1d8b-47e4-8ac4-1c5b012c40d0 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.939699] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576abf44-80a9-45cf-a2eb-f07300ad9a45 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.948430] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a373d35-8b84-4295-b701-cfb04f9e3206 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.968340] env[67015]: DEBUG nova.compute.provider_tree [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.978242] env[67015]: DEBUG nova.scheduler.client.report [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 861.002418] env[67015]: DEBUG oslo_concurrency.lockutils [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.586s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.003280] env[67015]: ERROR nova.compute.manager [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 
tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 861.003280] env[67015]: Faults: ['InvalidArgument'] [ 861.003280] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Traceback (most recent call last): [ 861.003280] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 861.003280] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] self.driver.spawn(context, instance, image_meta, [ 861.003280] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 861.003280] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] self._vmops.spawn(context, instance, image_meta, injected_files, [ 861.003280] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 861.003280] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] self._fetch_image_if_missing(context, vi) [ 861.003280] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 861.003280] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] image_cache(vi, tmp_image_ds_loc) [ 861.003280] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 861.003712] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] vm_util.copy_virtual_disk( [ 861.003712] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 861.003712] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] session._wait_for_task(vmdk_copy_task) [ 861.003712] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 861.003712] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] return self.wait_for_task(task_ref) [ 861.003712] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 861.003712] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] return evt.wait() [ 861.003712] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 861.003712] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] result = hub.switch() [ 861.003712] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 861.003712] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] return self.greenlet.switch() [ 861.003712] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 861.003712] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] self.f(*self.args, **self.kw) [ 861.004130] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 861.004130] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] raise exceptions.translate_fault(task_info.error) [ 861.004130] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 861.004130] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Faults: ['InvalidArgument'] [ 861.004130] env[67015]: ERROR nova.compute.manager [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] [ 861.004374] env[67015]: DEBUG nova.compute.utils [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 861.006438] env[67015]: DEBUG nova.compute.manager [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Build of instance 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93 was re-scheduled: A specified parameter was not correct: fileType [ 861.006438] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 861.007539] env[67015]: DEBUG nova.compute.manager [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 861.007753] env[67015]: DEBUG nova.compute.manager [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 861.009343] env[67015]: DEBUG nova.compute.manager [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 861.009343] env[67015]: DEBUG nova.network.neutron [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 861.908646] env[67015]: DEBUG nova.network.neutron [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.919452] env[67015]: INFO nova.compute.manager [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Took 0.91 seconds to deallocate network for instance. [ 862.038886] env[67015]: INFO nova.scheduler.client.report [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Deleted allocations for instance 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93 [ 862.064983] env[67015]: DEBUG oslo_concurrency.lockutils [None req-fd2b2ec5-20de-4d78-a11a-99d272f4e529 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Lock "1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 296.195s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.066531] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 291.259s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.066730] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 862.066909] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.067545] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6d6b3c99-f84e-4ddb-bbaa-ae29366a4405 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Lock "1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 96.959s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.067768] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6d6b3c99-f84e-4ddb-bbaa-ae29366a4405 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Acquiring lock "1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.067970] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6d6b3c99-f84e-4ddb-bbaa-ae29366a4405 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Lock "1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.068226] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6d6b3c99-f84e-4ddb-bbaa-ae29366a4405 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Lock "1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.070274] env[67015]: INFO nova.compute.manager [None req-6d6b3c99-f84e-4ddb-bbaa-ae29366a4405 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Terminating instance [ 862.072270] env[67015]: DEBUG nova.compute.manager [None req-6d6b3c99-f84e-4ddb-bbaa-ae29366a4405 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Start destroying the instance on the hypervisor. 
{{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 862.072517] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-6d6b3c99-f84e-4ddb-bbaa-ae29366a4405 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 862.072812] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-59a04697-8b0e-4c96-90c3-f990790913c9 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.083213] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac4a761e-1816-40af-b1dd-20ce9db8eb32 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.095790] env[67015]: DEBUG nova.compute.manager [None req-b22649bf-5559-4f9b-b2c7-87fdc23df00c tempest-ServersTestMultiNic-1356030888 tempest-ServersTestMultiNic-1356030888-project-member] [instance: 76b9817f-5571-48f6-8144-08d18f635750] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 862.117779] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-6d6b3c99-f84e-4ddb-bbaa-ae29366a4405 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93 could not be found. [ 862.117999] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-6d6b3c99-f84e-4ddb-bbaa-ae29366a4405 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 862.118195] env[67015]: INFO nova.compute.manager [None req-6d6b3c99-f84e-4ddb-bbaa-ae29366a4405 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Took 0.05 seconds to destroy the instance on the hypervisor. [ 862.118444] env[67015]: DEBUG oslo.service.loopingcall [None req-6d6b3c99-f84e-4ddb-bbaa-ae29366a4405 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 862.118771] env[67015]: DEBUG nova.compute.manager [-] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 862.118771] env[67015]: DEBUG nova.network.neutron [-] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 862.122883] env[67015]: DEBUG nova.compute.manager [None req-b22649bf-5559-4f9b-b2c7-87fdc23df00c tempest-ServersTestMultiNic-1356030888 tempest-ServersTestMultiNic-1356030888-project-member] [instance: 76b9817f-5571-48f6-8144-08d18f635750] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 862.142389] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b22649bf-5559-4f9b-b2c7-87fdc23df00c tempest-ServersTestMultiNic-1356030888 tempest-ServersTestMultiNic-1356030888-project-member] Lock "76b9817f-5571-48f6-8144-08d18f635750" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 232.835s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.152087] env[67015]: DEBUG nova.compute.manager [None req-176e4e6b-9e45-4e99-8f88-a71abf16d2f0 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: e0eca599-b9a0-40a5-968d-21ac240f815c] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 862.172544] env[67015]: DEBUG nova.network.neutron [-] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.187669] env[67015]: DEBUG nova.compute.manager [None req-176e4e6b-9e45-4e99-8f88-a71abf16d2f0 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: e0eca599-b9a0-40a5-968d-21ac240f815c] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 862.194710] env[67015]: INFO nova.compute.manager [-] [instance: 1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93] Took 0.08 seconds to deallocate network for instance. [ 862.212665] env[67015]: DEBUG oslo_concurrency.lockutils [None req-176e4e6b-9e45-4e99-8f88-a71abf16d2f0 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Lock "e0eca599-b9a0-40a5-968d-21ac240f815c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.904s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.221459] env[67015]: DEBUG nova.compute.manager [None req-5d4bbda1-8893-4b76-b584-179ff7412748 tempest-AttachVolumeNegativeTest-173917919 tempest-AttachVolumeNegativeTest-173917919-project-member] [instance: 573d1e82-a96d-47e9-89b0-efd69306ed59] Starting instance... 
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 862.265758] env[67015]: DEBUG nova.compute.manager [None req-5d4bbda1-8893-4b76-b584-179ff7412748 tempest-AttachVolumeNegativeTest-173917919 tempest-AttachVolumeNegativeTest-173917919-project-member] [instance: 573d1e82-a96d-47e9-89b0-efd69306ed59] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 862.306325] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5d4bbda1-8893-4b76-b584-179ff7412748 tempest-AttachVolumeNegativeTest-173917919 tempest-AttachVolumeNegativeTest-173917919-project-member] Lock "573d1e82-a96d-47e9-89b0-efd69306ed59" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.549s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.321013] env[67015]: DEBUG nova.compute.manager [None req-bf7c5f91-7852-4a16-a8b4-1430bd404801 tempest-ServerActionsV293TestJSON-982079223 tempest-ServerActionsV293TestJSON-982079223-project-member] [instance: 63420a94-ef64-407a-a032-61a619907b56] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 862.361920] env[67015]: DEBUG nova.compute.manager [None req-bf7c5f91-7852-4a16-a8b4-1430bd404801 tempest-ServerActionsV293TestJSON-982079223 tempest-ServerActionsV293TestJSON-982079223-project-member] [instance: 63420a94-ef64-407a-a032-61a619907b56] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 862.368835] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6d6b3c99-f84e-4ddb-bbaa-ae29366a4405 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741 tempest-FloatingIPsAssociationNegativeTestJSON-1648899741-project-member] Lock "1bafb1b5-d27b-4ca1-bb9f-09f7ab4a2a93" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.301s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.387637] env[67015]: DEBUG oslo_concurrency.lockutils [None req-bf7c5f91-7852-4a16-a8b4-1430bd404801 tempest-ServerActionsV293TestJSON-982079223 tempest-ServerActionsV293TestJSON-982079223-project-member] Lock "63420a94-ef64-407a-a032-61a619907b56" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.707s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.396427] env[67015]: DEBUG nova.compute.manager [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Starting instance... 
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 862.456301] env[67015]: DEBUG oslo_concurrency.lockutils [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.456564] env[67015]: DEBUG oslo_concurrency.lockutils [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.458192] env[67015]: INFO nova.compute.claims [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 862.890458] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f87cdca9-1790-41b7-a462-446e3fad1943 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.900389] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f201aaf-88fe-42af-b4f8-cda81064aa63 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.934877] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-461a36c2-34e5-461b-baa7-da3b008060ec {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.942262] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a0c6524-c96d-4aec-a437-d36a2b98df29 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.958420] env[67015]: DEBUG nova.compute.provider_tree [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 862.967315] env[67015]: DEBUG nova.scheduler.client.report [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 
862.983159] env[67015]: DEBUG oslo_concurrency.lockutils [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.527s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.983640] env[67015]: DEBUG nova.compute.manager [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 863.028804] env[67015]: DEBUG nova.compute.utils [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 863.030365] env[67015]: DEBUG nova.compute.manager [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 863.030582] env[67015]: DEBUG nova.network.neutron [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 863.044038] env[67015]: DEBUG nova.compute.manager [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 863.128488] env[67015]: DEBUG nova.compute.manager [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Start spawning the instance on the hypervisor. 
{{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 863.156520] env[67015]: DEBUG nova.virt.hardware [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 863.156816] env[67015]: DEBUG nova.virt.hardware [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 863.156940] env[67015]: DEBUG nova.virt.hardware [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 863.157225] env[67015]: DEBUG nova.virt.hardware [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 863.157461] env[67015]: DEBUG nova.virt.hardware [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 863.157628] env[67015]: DEBUG nova.virt.hardware [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 863.157845] env[67015]: DEBUG nova.virt.hardware [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 863.158014] env[67015]: DEBUG nova.virt.hardware [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 863.158183] env[67015]: DEBUG nova.virt.hardware [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 863.158345] env[67015]: DEBUG nova.virt.hardware [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 863.158513] env[67015]: DEBUG nova.virt.hardware [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 863.159379] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed66f282-ea2b-42e7-b88e-003f70a8dd13 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.169142] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4549473-541c-4f91-8a8a-660aa4a547af {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.174594] env[67015]: DEBUG nova.policy [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd7c9cd5f4e1d4c619f6a890161587186', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e08dff0a6cea4ba0a32d11749e42701f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 864.190948] env[67015]: DEBUG nova.network.neutron [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Successfully created port: 30d0fd3f-c1a8-4b29-a20f-46ee26981695 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 865.759047] env[67015]: DEBUG nova.network.neutron [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Successfully updated port: 30d0fd3f-c1a8-4b29-a20f-46ee26981695 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 865.770169] env[67015]: DEBUG oslo_concurrency.lockutils [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Acquiring lock "refresh_cache-96feb18e-14ee-40cf-bd5d-89a4e773c797" {{(pid=67015) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.770169] env[67015]: DEBUG oslo_concurrency.lockutils [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Acquired lock "refresh_cache-96feb18e-14ee-40cf-bd5d-89a4e773c797" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.770169] env[67015]: DEBUG nova.network.neutron [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 866.069310] env[67015]: DEBUG nova.network.neutron [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 866.186968] env[67015]: DEBUG nova.compute.manager [req-5ac9a9c1-0fd2-4123-b821-8c00dc7ed2a3 req-4ac1f007-a448-4696-ad97-b4ba5d017f74 service nova] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Received event network-vif-plugged-30d0fd3f-c1a8-4b29-a20f-46ee26981695 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 866.187195] env[67015]: DEBUG oslo_concurrency.lockutils [req-5ac9a9c1-0fd2-4123-b821-8c00dc7ed2a3 req-4ac1f007-a448-4696-ad97-b4ba5d017f74 service nova] Acquiring lock "96feb18e-14ee-40cf-bd5d-89a4e773c797-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.187497] env[67015]: DEBUG oslo_concurrency.lockutils [req-5ac9a9c1-0fd2-4123-b821-8c00dc7ed2a3 req-4ac1f007-a448-4696-ad97-b4ba5d017f74 service nova] Lock "96feb18e-14ee-40cf-bd5d-89a4e773c797-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.187696] env[67015]: DEBUG oslo_concurrency.lockutils [req-5ac9a9c1-0fd2-4123-b821-8c00dc7ed2a3 req-4ac1f007-a448-4696-ad97-b4ba5d017f74 service nova] Lock "96feb18e-14ee-40cf-bd5d-89a4e773c797-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.187865] env[67015]: DEBUG nova.compute.manager [req-5ac9a9c1-0fd2-4123-b821-8c00dc7ed2a3 req-4ac1f007-a448-4696-ad97-b4ba5d017f74 service nova] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] No waiting events found dispatching network-vif-plugged-30d0fd3f-c1a8-4b29-a20f-46ee26981695 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 866.188033] env[67015]: WARNING nova.compute.manager [req-5ac9a9c1-0fd2-4123-b821-8c00dc7ed2a3 req-4ac1f007-a448-4696-ad97-b4ba5d017f74 service nova] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Received unexpected event network-vif-plugged-30d0fd3f-c1a8-4b29-a20f-46ee26981695 for instance with vm_state building and task_state spawning. 
[ 866.338965] env[67015]: DEBUG nova.network.neutron [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Updating instance_info_cache with network_info: [{"id": "30d0fd3f-c1a8-4b29-a20f-46ee26981695", "address": "fa:16:3e:06:8c:94", "network": {"id": "566110f6-8e91-4a05-beeb-4b3136117bb0", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1939787835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e08dff0a6cea4ba0a32d11749e42701f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30d0fd3f-c1", "ovs_interfaceid": "30d0fd3f-c1a8-4b29-a20f-46ee26981695", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.352766] env[67015]: DEBUG oslo_concurrency.lockutils [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Releasing lock "refresh_cache-96feb18e-14ee-40cf-bd5d-89a4e773c797" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.353091] env[67015]: DEBUG nova.compute.manager [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Instance network_info: |[{"id": "30d0fd3f-c1a8-4b29-a20f-46ee26981695", "address": "fa:16:3e:06:8c:94", "network": {"id": "566110f6-8e91-4a05-beeb-4b3136117bb0", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1939787835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e08dff0a6cea4ba0a32d11749e42701f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30d0fd3f-c1", "ovs_interfaceid": "30d0fd3f-c1a8-4b29-a20f-46ee26981695", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 
866.353504] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:8c:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '503991c4-44d0-42d9-aa03-5259331f1051', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '30d0fd3f-c1a8-4b29-a20f-46ee26981695', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 866.361192] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Creating folder: Project (e08dff0a6cea4ba0a32d11749e42701f). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 866.361796] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2cdaf9d8-dece-4def-99c6-2af5cea175ca {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.374985] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Created folder: Project (e08dff0a6cea4ba0a32d11749e42701f) in parent group-v623108. [ 866.374985] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Creating folder: Instances. Parent ref: group-v623162. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 866.375153] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aa45bad4-77cc-4574-891d-791547585f75 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.384263] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Created folder: Instances in parent group-v623162. [ 866.384504] env[67015]: DEBUG oslo.service.loopingcall [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 866.384734] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 866.384947] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c0838b69-168f-4c16-88b3-099b3098816a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.404511] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 866.404511] env[67015]: value = "task-3114441" [ 866.404511] env[67015]: _type = "Task" [ 866.404511] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.412139] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114441, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.916655] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114441, 'name': CreateVM_Task, 'duration_secs': 0.335851} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.917226] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 866.918276] env[67015]: DEBUG oslo_concurrency.lockutils [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.918641] env[67015]: DEBUG oslo_concurrency.lockutils [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.920984] env[67015]: DEBUG oslo_concurrency.lockutils [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 866.920984] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5db6512-c42a-4d7b-9995-286b420e0be9 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.924706] env[67015]: DEBUG oslo_vmware.api [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Waiting for the task: (returnval){ [ 866.924706] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52350d70-b500-4c52-e491-9b00385e2e62" [ 866.924706] env[67015]: _type = "Task" [ 866.924706] 
env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.932802] env[67015]: DEBUG oslo_vmware.api [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52350d70-b500-4c52-e491-9b00385e2e62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.436548] env[67015]: DEBUG oslo_concurrency.lockutils [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.436821] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 867.437056] env[67015]: DEBUG oslo_concurrency.lockutils [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.757296] env[67015]: DEBUG nova.compute.manager [req-af21b653-7925-4bad-9bd2-741897b734c7 req-59cb3ac6-8724-4ffe-b3d9-888520869a10 service nova] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Received event network-changed-30d0fd3f-c1a8-4b29-a20f-46ee26981695 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 868.757296] env[67015]: DEBUG nova.compute.manager [req-af21b653-7925-4bad-9bd2-741897b734c7 req-59cb3ac6-8724-4ffe-b3d9-888520869a10 service nova] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Refreshing instance network info cache due to event network-changed-30d0fd3f-c1a8-4b29-a20f-46ee26981695. 
{{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 868.757296] env[67015]: DEBUG oslo_concurrency.lockutils [req-af21b653-7925-4bad-9bd2-741897b734c7 req-59cb3ac6-8724-4ffe-b3d9-888520869a10 service nova] Acquiring lock "refresh_cache-96feb18e-14ee-40cf-bd5d-89a4e773c797" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.757296] env[67015]: DEBUG oslo_concurrency.lockutils [req-af21b653-7925-4bad-9bd2-741897b734c7 req-59cb3ac6-8724-4ffe-b3d9-888520869a10 service nova] Acquired lock "refresh_cache-96feb18e-14ee-40cf-bd5d-89a4e773c797" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.758381] env[67015]: DEBUG nova.network.neutron [req-af21b653-7925-4bad-9bd2-741897b734c7 req-59cb3ac6-8724-4ffe-b3d9-888520869a10 service nova] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Refreshing network info cache for port 30d0fd3f-c1a8-4b29-a20f-46ee26981695 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 869.516970] env[67015]: DEBUG nova.network.neutron [req-af21b653-7925-4bad-9bd2-741897b734c7 req-59cb3ac6-8724-4ffe-b3d9-888520869a10 service nova] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Updated VIF entry in instance network info cache for port 30d0fd3f-c1a8-4b29-a20f-46ee26981695. {{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 869.517415] env[67015]: DEBUG nova.network.neutron [req-af21b653-7925-4bad-9bd2-741897b734c7 req-59cb3ac6-8724-4ffe-b3d9-888520869a10 service nova] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Updating instance_info_cache with network_info: [{"id": "30d0fd3f-c1a8-4b29-a20f-46ee26981695", "address": "fa:16:3e:06:8c:94", "network": {"id": "566110f6-8e91-4a05-beeb-4b3136117bb0", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1939787835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e08dff0a6cea4ba0a32d11749e42701f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30d0fd3f-c1", "ovs_interfaceid": "30d0fd3f-c1a8-4b29-a20f-46ee26981695", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.531826] env[67015]: DEBUG oslo_concurrency.lockutils [req-af21b653-7925-4bad-9bd2-741897b734c7 req-59cb3ac6-8724-4ffe-b3d9-888520869a10 service nova] Releasing lock "refresh_cache-96feb18e-14ee-40cf-bd5d-89a4e773c797" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 876.958978] env[67015]: DEBUG oslo_concurrency.lockutils [None req-aeb783d2-3fe6-4971-a64b-807197715d80 tempest-AttachInterfacesUnderV243Test-1753202259 
tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Acquiring lock "96feb18e-14ee-40cf-bd5d-89a4e773c797" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.737892] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 880.738188] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 880.757012] env[67015]: DEBUG oslo_concurrency.lockutils [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquiring lock "8c919afe-37b6-47f0-b939-d9df5800d7ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.757404] env[67015]: DEBUG oslo_concurrency.lockutils [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Lock "8c919afe-37b6-47f0-b939-d9df5800d7ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.798592] env[67015]: DEBUG oslo_concurrency.lockutils [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquiring lock "92f1e0cd-924f-42a4-a91b-6e9dcede20fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.798592] env[67015]: DEBUG oslo_concurrency.lockutils [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Lock "92f1e0cd-924f-42a4-a91b-6e9dcede20fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.509663] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 881.532025] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 881.532261] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task 
ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 882.515592] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 882.515946] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 882.516065] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 882.536765] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 882.536924] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 882.537068] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 882.537201] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 882.537326] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 882.537447] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 882.537570] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 882.537692] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Skipping network cache update for instance because it is Building. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 882.537813] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 882.537930] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 882.538084] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 882.538620] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 883.513953] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 883.514223] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 883.514373] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 883.514529] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 883.525591] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.525896] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.525982] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.526135] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 883.527251] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bad88a8-57df-4cb3-8c0e-4efd8d0595fa {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.536077] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07f9caaf-2793-4953-b1e5-695efd1a2c04 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.550130] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1207639-c9d1-423b-bc1e-60fe4d913813 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.556284] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e3081b-4b32-46de-a663-a7cc18363aff {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.584754] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181054MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 883.584913] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.585126] 
env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.658319] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance e0ac77e1-fb77-4b97-bacc-838cc3e16bbc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 883.658491] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 13e28171-8074-4660-91cf-f6d569414fc6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 883.658629] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 98ad3e5c-065d-4561-890c-46d5ca0a8f7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 883.658746] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 781b688b-ec99-4423-99b2-2502c6e8a75d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 883.658871] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 0734f630-dea5-4ee0-b890-dd50f3e8b178 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 883.658991] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 883.659125] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 66fa7689-aea7-4b88-b63c-0754f5e99d51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 883.659245] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 3cbfca3b-863a-40d1-81ab-63794b8de97e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 883.659362] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 5c77964f-e902-489a-86c3-9c9d4dd304d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 883.659503] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 96feb18e-14ee-40cf-bd5d-89a4e773c797 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 883.674294] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 30f91210-0318-4912-808b-843c2cd04ea1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.685990] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 843278e1-7d76-4f50-8170-9e335d29326e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.696072] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 4a1f7297-67b0-4c57-8c11-101877c27e48 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.705808] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 0182f659-5d01-4d6f-8242-aaec4efae151 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.717945] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 00412418-9727-4ed0-b4ff-92981ddab7ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.728135] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6a333c84-7e40-4ba0-b60b-a5972720e306 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.738153] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 5f04238c-701e-4ea3-9dde-769ec26a4462 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.748057] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 22f73210-0a16-4aa6-bc1d-d6625a6e4243 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.761278] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 09674d48-8f73-40f7-8ff3-3d4198a053fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.775261] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance db3de804-63b7-4887-b752-282e70e0f20e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.785308] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance ef201b80-65b2-4fa1-8150-8b7a3fbea673 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.796028] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 2546fa27-1d27-4e23-94f0-e6bdb4b42179 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.807158] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 1dc453f0-0983-428f-a186-f61248fe74c3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.816784] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 0371af11-3654-4009-8e87-dc2c516133be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.826816] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 13a015ac-f68b-421c-a397-c3f7d71531fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.836443] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance de8e3930-5fdc-49ca-8bbb-46b34dc32e8f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.846420] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance fe41695e-0aee-45a6-b66b-a7185e45fc4a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.856404] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8c919afe-37b6-47f0-b939-d9df5800d7ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.865691] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 92f1e0cd-924f-42a4-a91b-6e9dcede20fc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.866021] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 883.866208] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 884.194285] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e609a704-b774-4fd7-930b-2f3893ef2783 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.201590] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c69f14-8a30-4529-9c04-b43718fb131b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.231755] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477c9622-423d-4024-a452-da22cfe46cce {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.238747] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f877e53-0241-4845-9e2f-648cf4d9da08 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.251446] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.260667] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 884.275660] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 884.275827] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.691s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.747994] env[67015]: WARNING oslo_vmware.rw_handles [None 
req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 905.747994] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 905.747994] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 905.747994] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 905.747994] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 905.747994] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 905.747994] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 905.747994] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 905.747994] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 905.747994] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 905.747994] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 905.747994] env[67015]: ERROR oslo_vmware.rw_handles [ 905.748658] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/73e0a291-76b7-4e71-a65e-3cda509b2221/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 905.750252] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 905.750513] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Copying Virtual Disk [datastore2] vmware_temp/73e0a291-76b7-4e71-a65e-3cda509b2221/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/73e0a291-76b7-4e71-a65e-3cda509b2221/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 905.750823] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e4e941a6-96be-4cf8-b435-dab10b71be6c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.759704] env[67015]: DEBUG oslo_vmware.api [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Waiting for the task: (returnval){ [ 905.759704] env[67015]: value = "task-3114442" [ 905.759704] env[67015]: _type = "Task" [ 905.759704] env[67015]: } 
to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.767347] env[67015]: DEBUG oslo_vmware.api [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Task: {'id': task-3114442, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.270268] env[67015]: DEBUG oslo_vmware.exceptions [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Fault InvalidArgument not matched. {{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 906.270549] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.271124] env[67015]: ERROR nova.compute.manager [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 906.271124] env[67015]: Faults: ['InvalidArgument'] [ 906.271124] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Traceback (most recent call last): [ 906.271124] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 906.271124] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] yield resources [ 906.271124] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 906.271124] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] self.driver.spawn(context, instance, image_meta, [ 906.271124] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 906.271124] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 906.271124] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 906.271124] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] self._fetch_image_if_missing(context, vi) [ 906.271124] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 906.271531] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] image_cache(vi, tmp_image_ds_loc) [ 906.271531] env[67015]: ERROR nova.compute.manager [instance: 
e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 906.271531] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] vm_util.copy_virtual_disk( [ 906.271531] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 906.271531] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] session._wait_for_task(vmdk_copy_task) [ 906.271531] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 906.271531] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] return self.wait_for_task(task_ref) [ 906.271531] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 906.271531] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] return evt.wait() [ 906.271531] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 906.271531] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] result = hub.switch() [ 906.271531] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 906.271531] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] return self.greenlet.switch() [ 906.271976] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 906.271976] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] self.f(*self.args, **self.kw) [ 906.271976] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 906.271976] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] raise exceptions.translate_fault(task_info.error) [ 906.271976] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 906.271976] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Faults: ['InvalidArgument'] [ 906.271976] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] [ 906.271976] env[67015]: INFO nova.compute.manager [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Terminating instance [ 906.273033] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.273253] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 906.273495] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4cc07799-8114-49dd-8d7c-9e75d075fb15 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.275600] env[67015]: DEBUG nova.compute.manager [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 906.275787] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 906.276533] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a64140b3-c3e3-4bab-a8b6-d1c69a634b19 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.283158] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 906.283399] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f9605278-4b16-47b2-af59-a76fc8cc8632 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.285464] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 906.285632] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 906.286597] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d85817d-d128-4fa8-a76f-53fbcb86acc1 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.291287] env[67015]: DEBUG oslo_vmware.api [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Waiting for the task: (returnval){ [ 906.291287] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52eced5e-e273-c06a-8d0b-eb1d5540db37" [ 906.291287] env[67015]: _type = "Task" [ 906.291287] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.298203] env[67015]: DEBUG oslo_vmware.api [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52eced5e-e273-c06a-8d0b-eb1d5540db37, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.354943] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 906.355188] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 906.355373] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Deleting the datastore file [datastore2] e0ac77e1-fb77-4b97-bacc-838cc3e16bbc {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 906.355662] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8c743204-b421-4647-8814-b13d9c89f619 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.362546] env[67015]: DEBUG oslo_vmware.api [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Waiting for the task: (returnval){ [ 906.362546] env[67015]: value = "task-3114444" [ 906.362546] env[67015]: _type = "Task" [ 906.362546] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.369968] env[67015]: DEBUG oslo_vmware.api [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Task: {'id': task-3114444, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.803130] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 906.803130] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Creating directory with path [datastore2] vmware_temp/99481127-6390-459a-8287-bfd1b1bd0685/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 906.803130] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-888c8a67-66a3-4c92-8bc7-8a40ee411f6f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.814173] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Created directory with path [datastore2] vmware_temp/99481127-6390-459a-8287-bfd1b1bd0685/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 906.814389] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Fetch image to [datastore2] vmware_temp/99481127-6390-459a-8287-bfd1b1bd0685/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 906.814613] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/99481127-6390-459a-8287-bfd1b1bd0685/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 906.815395] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02fe2d86-1803-4ac5-97c1-3ca32156313b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.821803] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ac5869-e8aa-4857-a6b3-c410927ac1db {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.831653] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbcaa3b0-599a-4485-adba-d2d402bb6963 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.862151] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9c760d28-996a-41e4-8065-c3e339c50cc2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.872682] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-172ed75a-2071-4f7f-9cec-56ab1e71e201 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.874366] env[67015]: DEBUG oslo_vmware.api [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Task: {'id': task-3114444, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067883} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.874605] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 906.874789] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 906.874983] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 906.875182] env[67015]: INFO nova.compute.manager [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 906.877367] env[67015]: DEBUG nova.compute.claims [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 906.877535] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.877740] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.909189] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 906.969162] env[67015]: DEBUG oslo_vmware.rw_handles [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/99481127-6390-459a-8287-bfd1b1bd0685/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 907.033289] env[67015]: DEBUG oslo_vmware.rw_handles [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 907.033521] env[67015]: DEBUG oslo_vmware.rw_handles [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/99481127-6390-459a-8287-bfd1b1bd0685/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 907.331058] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff1636e-07f2-457e-9af6-cd61f98d9ac4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.337673] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34893a3b-e622-4219-adce-b0c932e4ec70 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.370548] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03975d1-a993-4152-8986-f3c0ef6df981 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.378656] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31075c88-29af-46fb-9cbb-eaddea5e60eb {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.391702] env[67015]: DEBUG nova.compute.provider_tree [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 907.400379] env[67015]: DEBUG nova.scheduler.client.report [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 907.417130] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.539s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.417663] env[67015]: ERROR nova.compute.manager [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 907.417663] env[67015]: Faults: ['InvalidArgument'] [ 907.417663] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Traceback (most recent call last): [ 907.417663] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 907.417663] 
env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] self.driver.spawn(context, instance, image_meta, [ 907.417663] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 907.417663] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 907.417663] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 907.417663] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] self._fetch_image_if_missing(context, vi) [ 907.417663] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 907.417663] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] image_cache(vi, tmp_image_ds_loc) [ 907.417663] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 907.418008] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] vm_util.copy_virtual_disk( [ 907.418008] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 907.418008] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] session._wait_for_task(vmdk_copy_task) [ 907.418008] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 907.418008] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] return self.wait_for_task(task_ref) [ 907.418008] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 907.418008] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] return evt.wait() [ 907.418008] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 907.418008] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] result = hub.switch() [ 907.418008] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 907.418008] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] return self.greenlet.switch() [ 907.418008] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 907.418008] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] self.f(*self.args, **self.kw) [ 907.418381] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 907.418381] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] raise exceptions.translate_fault(task_info.error) [ 907.418381] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 907.418381] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Faults: ['InvalidArgument'] [ 907.418381] env[67015]: ERROR nova.compute.manager [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] [ 907.418381] env[67015]: DEBUG nova.compute.utils [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 907.419906] env[67015]: DEBUG nova.compute.manager [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Build of instance e0ac77e1-fb77-4b97-bacc-838cc3e16bbc was re-scheduled: A specified parameter was not correct: fileType [ 907.419906] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 907.420293] env[67015]: DEBUG nova.compute.manager [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 907.420467] env[67015]: DEBUG nova.compute.manager [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 907.420625] env[67015]: DEBUG nova.compute.manager [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 907.420790] env[67015]: DEBUG nova.network.neutron [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 907.788059] env[67015]: DEBUG nova.network.neutron [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.803260] env[67015]: INFO nova.compute.manager [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Took 0.38 seconds to deallocate network for instance. [ 907.903464] env[67015]: INFO nova.scheduler.client.report [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Deleted allocations for instance e0ac77e1-fb77-4b97-bacc-838cc3e16bbc [ 907.926867] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d0d877da-766b-4a7b-8f26-c6f674196239 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Lock "e0ac77e1-fb77-4b97-bacc-838cc3e16bbc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 341.055s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.928438] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "e0ac77e1-fb77-4b97-bacc-838cc3e16bbc" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 337.120s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.928575] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 907.928754] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "e0ac77e1-fb77-4b97-bacc-838cc3e16bbc" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.929201] env[67015]: DEBUG oslo_concurrency.lockutils [None req-39c63962-4949-4572-92ba-704594b8a39c tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Lock "e0ac77e1-fb77-4b97-bacc-838cc3e16bbc" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 141.824s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.929456] env[67015]: DEBUG oslo_concurrency.lockutils [None req-39c63962-4949-4572-92ba-704594b8a39c tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquiring lock "e0ac77e1-fb77-4b97-bacc-838cc3e16bbc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.929839] env[67015]: DEBUG oslo_concurrency.lockutils [None req-39c63962-4949-4572-92ba-704594b8a39c tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Lock "e0ac77e1-fb77-4b97-bacc-838cc3e16bbc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.930120] env[67015]: DEBUG oslo_concurrency.lockutils [None req-39c63962-4949-4572-92ba-704594b8a39c tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Lock "e0ac77e1-fb77-4b97-bacc-838cc3e16bbc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.932094] env[67015]: INFO nova.compute.manager [None req-39c63962-4949-4572-92ba-704594b8a39c tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Terminating instance [ 907.933845] env[67015]: DEBUG nova.compute.manager [None req-39c63962-4949-4572-92ba-704594b8a39c tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Start destroying the instance on the hypervisor. 
{{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 907.934562] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-39c63962-4949-4572-92ba-704594b8a39c tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 907.934562] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c36035ac-e583-46c4-8857-db32c7cbe24b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.941087] env[67015]: DEBUG nova.compute.manager [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 907.947760] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34643b2a-a0b1-4186-a02f-e192db7bb7a2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.977801] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-39c63962-4949-4572-92ba-704594b8a39c tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e0ac77e1-fb77-4b97-bacc-838cc3e16bbc could not be found. [ 907.977973] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-39c63962-4949-4572-92ba-704594b8a39c tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 907.978174] env[67015]: INFO nova.compute.manager [None req-39c63962-4949-4572-92ba-704594b8a39c tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Took 0.04 seconds to destroy the instance on the hypervisor. [ 907.978429] env[67015]: DEBUG oslo.service.loopingcall [None req-39c63962-4949-4572-92ba-704594b8a39c tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 907.978644] env[67015]: DEBUG nova.compute.manager [-] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 907.978744] env[67015]: DEBUG nova.network.neutron [-] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 908.003775] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.004088] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.005657] env[67015]: INFO nova.compute.claims [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 908.016618] env[67015]: DEBUG nova.network.neutron [-] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.027476] env[67015]: INFO nova.compute.manager [-] [instance: e0ac77e1-fb77-4b97-bacc-838cc3e16bbc] Took 0.05 seconds to deallocate network for instance. 
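Editor's note: the "Waiting for function ... _deallocate_network_with_retries to return" record above comes from oslo.service's looping-call machinery, which re-invokes a function until it signals completion and blocks the caller in the meantime. A minimal sketch of that mechanism, using FixedIntervalLoopingCall as a stand-in for whichever looping-call variant Nova actually constructs here, with an illustrative poll function:

```python
from oslo_service import loopingcall

def _deallocate_with_retries():
    # A real retry loop would call Neutron and simply return on transient
    # failure so it gets invoked again; raising LoopingCallDone signals
    # success and stops the loop.
    raise loopingcall.LoopingCallDone()

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
# start() schedules the periodic invocations; wait() blocks the caller
# until LoopingCallDone is raised, which is the wait the DEBUG line logs.
timer.start(interval=1).wait()
```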
[ 908.124695] env[67015]: DEBUG oslo_concurrency.lockutils [None req-39c63962-4949-4572-92ba-704594b8a39c tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Lock "e0ac77e1-fb77-4b97-bacc-838cc3e16bbc" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.195s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.398055] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4531914-ab84-4e76-909b-bbe066ebe968 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.405895] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec9f1bea-5765-479a-9a6f-8f16af32e48d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.434810] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f122d1b-e6cd-4a38-91fc-2433ea486788 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.441935] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f1654e-93ed-4d5f-baec-16b95569fbb4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.454542] env[67015]: DEBUG nova.compute.provider_tree [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.465315] env[67015]: DEBUG nova.scheduler.client.report [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 908.485395] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.481s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.485728] env[67015]: DEBUG nova.compute.manager [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Start building networks asynchronously for instance. 
{{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 908.548720] env[67015]: DEBUG nova.compute.utils [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 908.550336] env[67015]: DEBUG nova.compute.manager [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 908.550565] env[67015]: DEBUG nova.network.neutron [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 908.569560] env[67015]: DEBUG nova.compute.manager [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 908.653703] env[67015]: DEBUG nova.compute.manager [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Start spawning the instance on the hypervisor. 
{{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 908.678902] env[67015]: DEBUG nova.policy [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b984ad28d0ba460186b44c327693c836', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0cea460e4b044c1dace1d040d25da1fa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 908.682268] env[67015]: DEBUG nova.virt.hardware [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 908.682485] env[67015]: DEBUG nova.virt.hardware [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 908.682643] env[67015]: DEBUG nova.virt.hardware [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 908.682829] env[67015]: DEBUG nova.virt.hardware [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 908.682976] env[67015]: DEBUG nova.virt.hardware [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 908.683140] env[67015]: DEBUG nova.virt.hardware [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 908.683350] env[67015]: DEBUG nova.virt.hardware [None 
req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 908.683507] env[67015]: DEBUG nova.virt.hardware [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 908.683721] env[67015]: DEBUG nova.virt.hardware [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 908.683825] env[67015]: DEBUG nova.virt.hardware [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 908.683992] env[67015]: DEBUG nova.virt.hardware [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 908.684825] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c78b5d-c3ec-43e2-8474-44cad660cd53 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.693246] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92324cdc-928a-4e5f-851e-7e1166f24102 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.234922] env[67015]: DEBUG oslo_concurrency.lockutils [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquiring lock "437b21d5-932d-4216-b7f7-17c6eab2665f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.235270] env[67015]: DEBUG oslo_concurrency.lockutils [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Lock "437b21d5-932d-4216-b7f7-17c6eab2665f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.500048] env[67015]: DEBUG nova.network.neutron [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Successfully created port: 3bc01ab5-4688-4913-8d90-85504de2548f {{(pid=67015) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 910.672375] env[67015]: DEBUG nova.network.neutron [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Successfully updated port: 3bc01ab5-4688-4913-8d90-85504de2548f {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 910.690295] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Acquiring lock "refresh_cache-30f91210-0318-4912-808b-843c2cd04ea1" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 910.690632] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Acquired lock "refresh_cache-30f91210-0318-4912-808b-843c2cd04ea1" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.690826] env[67015]: DEBUG nova.network.neutron [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 910.793523] env[67015]: DEBUG nova.network.neutron [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Instance cache missing network info. 
{{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 910.877264] env[67015]: DEBUG nova.compute.manager [req-d75f5eba-3388-4bb8-ae45-c20d00d7e86a req-d9678fc2-4fa8-44ec-a926-d48dd9b5ae17 service nova] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Received event network-vif-plugged-3bc01ab5-4688-4913-8d90-85504de2548f {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 910.877521] env[67015]: DEBUG oslo_concurrency.lockutils [req-d75f5eba-3388-4bb8-ae45-c20d00d7e86a req-d9678fc2-4fa8-44ec-a926-d48dd9b5ae17 service nova] Acquiring lock "30f91210-0318-4912-808b-843c2cd04ea1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.877880] env[67015]: DEBUG oslo_concurrency.lockutils [req-d75f5eba-3388-4bb8-ae45-c20d00d7e86a req-d9678fc2-4fa8-44ec-a926-d48dd9b5ae17 service nova] Lock "30f91210-0318-4912-808b-843c2cd04ea1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.878214] env[67015]: DEBUG oslo_concurrency.lockutils [req-d75f5eba-3388-4bb8-ae45-c20d00d7e86a req-d9678fc2-4fa8-44ec-a926-d48dd9b5ae17 service nova] Lock "30f91210-0318-4912-808b-843c2cd04ea1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.878520] env[67015]: DEBUG nova.compute.manager [req-d75f5eba-3388-4bb8-ae45-c20d00d7e86a req-d9678fc2-4fa8-44ec-a926-d48dd9b5ae17 service nova] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] No waiting events found dispatching network-vif-plugged-3bc01ab5-4688-4913-8d90-85504de2548f {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 910.878833] env[67015]: WARNING nova.compute.manager [req-d75f5eba-3388-4bb8-ae45-c20d00d7e86a req-d9678fc2-4fa8-44ec-a926-d48dd9b5ae17 service nova] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Received unexpected event network-vif-plugged-3bc01ab5-4688-4913-8d90-85504de2548f for instance with vm_state building and task_state spawning. 
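Editor's note: the "No waiting events found" / "Received unexpected event" records above show Neutron's network-vif-plugged notification arriving before any waiter has registered for it, which is harmless while the instance is still building. A simplified, self-contained analogy of that per-instance event bookkeeping follows; this is illustrative code, not Nova's InstanceEvents implementation:

```python
import threading

_waiters = {}               # (instance_uuid, event_name) -> threading.Event
_waiters_lock = threading.Lock()

def prepare_for_event(instance_uuid, event_name):
    # The compute manager registers interest before triggering the action
    # that will eventually produce the external event.
    ev = threading.Event()
    with _waiters_lock:
        _waiters[(instance_uuid, event_name)] = ev
    return ev

def deliver_event(instance_uuid, event_name):
    # Called when the external notification arrives from Neutron. Popping
    # a missing key is the "no waiting events found" case logged above.
    with _waiters_lock:
        ev = _waiters.pop((instance_uuid, event_name), None)
    if ev is None:
        print('unexpected event %s for %s' % (event_name, instance_uuid))
    else:
        ev.set()
```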
[ 911.102683] env[67015]: DEBUG nova.network.neutron [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Updating instance_info_cache with network_info: [{"id": "3bc01ab5-4688-4913-8d90-85504de2548f", "address": "fa:16:3e:90:ff:2a", "network": {"id": "e9225289-1391-4480-ae9c-3d6659704be4", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1470457552-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cea460e4b044c1dace1d040d25da1fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cc30a16-f070-421c-964e-50c9aa32f17a", "external-id": "nsx-vlan-transportzone-424", "segmentation_id": 424, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bc01ab5-46", "ovs_interfaceid": "3bc01ab5-4688-4913-8d90-85504de2548f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.117496] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Releasing lock "refresh_cache-30f91210-0318-4912-808b-843c2cd04ea1" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 911.118119] env[67015]: DEBUG nova.compute.manager [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Instance network_info: |[{"id": "3bc01ab5-4688-4913-8d90-85504de2548f", "address": "fa:16:3e:90:ff:2a", "network": {"id": "e9225289-1391-4480-ae9c-3d6659704be4", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1470457552-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cea460e4b044c1dace1d040d25da1fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cc30a16-f070-421c-964e-50c9aa32f17a", "external-id": "nsx-vlan-transportzone-424", "segmentation_id": 424, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bc01ab5-46", "ovs_interfaceid": "3bc01ab5-4688-4913-8d90-85504de2548f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 911.118894] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None 
req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:ff:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cc30a16-f070-421c-964e-50c9aa32f17a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3bc01ab5-4688-4913-8d90-85504de2548f', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 911.134182] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Creating folder: Project (0cea460e4b044c1dace1d040d25da1fa). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 911.134926] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-434bd224-ba79-4d3e-ac07-457a4568b83b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.146982] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Created folder: Project (0cea460e4b044c1dace1d040d25da1fa) in parent group-v623108. [ 911.147311] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Creating folder: Instances. Parent ref: group-v623165. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 911.147641] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8110335a-3e89-4f18-897f-8b07f1f8b236 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.157689] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Created folder: Instances in parent group-v623165. [ 911.157940] env[67015]: DEBUG oslo.service.loopingcall [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 911.158110] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 911.158303] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dac98584-674d-4457-8f7b-94016fd506ad {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.176533] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 911.176533] env[67015]: value = "task-3114447" [ 911.176533] env[67015]: _type = "Task" [ 911.176533] env[67015]: } to complete. 
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.185303] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114447, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.687258] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114447, 'name': CreateVM_Task, 'duration_secs': 0.301914} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.687566] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 911.688108] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.688288] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.688613] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 911.688859] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c3f6dae-5d26-4ff5-ac86-91a22f6c3e5b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.693371] env[67015]: DEBUG oslo_vmware.api [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Waiting for the task: (returnval){ [ 911.693371] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]529f6358-d67a-2c2d-7ded-638a4da04709" [ 911.693371] env[67015]: _type = "Task" [ 911.693371] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.700869] env[67015]: DEBUG oslo_vmware.api [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]529f6358-d67a-2c2d-7ded-638a4da04709, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.203755] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.204036] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 912.204264] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.958184] env[67015]: DEBUG nova.compute.manager [req-8f33d927-8a89-42a2-b025-a7e323f5a496 req-04f8be95-3b0d-4352-bcb1-589dd063891a service nova] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Received event network-changed-3bc01ab5-4688-4913-8d90-85504de2548f {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 912.958417] env[67015]: DEBUG nova.compute.manager [req-8f33d927-8a89-42a2-b025-a7e323f5a496 req-04f8be95-3b0d-4352-bcb1-589dd063891a service nova] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Refreshing instance network info cache due to event network-changed-3bc01ab5-4688-4913-8d90-85504de2548f. {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 912.958540] env[67015]: DEBUG oslo_concurrency.lockutils [req-8f33d927-8a89-42a2-b025-a7e323f5a496 req-04f8be95-3b0d-4352-bcb1-589dd063891a service nova] Acquiring lock "refresh_cache-30f91210-0318-4912-808b-843c2cd04ea1" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.958909] env[67015]: DEBUG oslo_concurrency.lockutils [req-8f33d927-8a89-42a2-b025-a7e323f5a496 req-04f8be95-3b0d-4352-bcb1-589dd063891a service nova] Acquired lock "refresh_cache-30f91210-0318-4912-808b-843c2cd04ea1" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.959111] env[67015]: DEBUG nova.network.neutron [req-8f33d927-8a89-42a2-b025-a7e323f5a496 req-04f8be95-3b0d-4352-bcb1-589dd063891a service nova] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Refreshing network info cache for port 3bc01ab5-4688-4913-8d90-85504de2548f {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 913.331858] env[67015]: DEBUG nova.network.neutron [req-8f33d927-8a89-42a2-b025-a7e323f5a496 req-04f8be95-3b0d-4352-bcb1-589dd063891a service nova] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Updated VIF entry in instance network info cache for port 3bc01ab5-4688-4913-8d90-85504de2548f. 
{{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 913.332235] env[67015]: DEBUG nova.network.neutron [req-8f33d927-8a89-42a2-b025-a7e323f5a496 req-04f8be95-3b0d-4352-bcb1-589dd063891a service nova] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Updating instance_info_cache with network_info: [{"id": "3bc01ab5-4688-4913-8d90-85504de2548f", "address": "fa:16:3e:90:ff:2a", "network": {"id": "e9225289-1391-4480-ae9c-3d6659704be4", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1470457552-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cea460e4b044c1dace1d040d25da1fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cc30a16-f070-421c-964e-50c9aa32f17a", "external-id": "nsx-vlan-transportzone-424", "segmentation_id": 424, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bc01ab5-46", "ovs_interfaceid": "3bc01ab5-4688-4913-8d90-85504de2548f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.342137] env[67015]: DEBUG oslo_concurrency.lockutils [req-8f33d927-8a89-42a2-b025-a7e323f5a496 req-04f8be95-3b0d-4352-bcb1-589dd063891a service nova] Releasing lock "refresh_cache-30f91210-0318-4912-808b-843c2cd04ea1" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 924.213824] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b9f28a75-baa6-4bfe-8b64-573e08410db2 tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Acquiring lock "30f91210-0318-4912-808b-843c2cd04ea1" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.276479] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 941.509691] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 943.514663] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 943.514982] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 943.515060] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 943.515286] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 944.513674] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 944.513877] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 944.513999] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 944.539410] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 944.539769] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 944.539769] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 944.539855] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 944.539929] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 944.540063] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 944.540192] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Skipping network cache update for instance because it is Building. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 944.540311] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 944.540430] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 944.540548] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 944.540969] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 944.541549] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 944.541758] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 944.552569] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.552779] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.552943] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.553132] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 944.554200] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef4bfc7e-835c-42b4-a950-c59c4c1f8f6d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.563239] env[67015]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e80e0523-c24e-40ae-863a-bab84ff82ab8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.578511] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b56d3ef-257a-4521-a1c8-bcbefb22f62a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.584580] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-488c013d-03fd-4269-a0dc-c9ba35c75e68 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.614049] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181031MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 944.614210] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.614398] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.688991] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 13e28171-8074-4660-91cf-f6d569414fc6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.688991] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 98ad3e5c-065d-4561-890c-46d5ca0a8f7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.689182] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 781b688b-ec99-4423-99b2-2502c6e8a75d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.689223] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 0734f630-dea5-4ee0-b890-dd50f3e8b178 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.689410] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.690033] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 66fa7689-aea7-4b88-b63c-0754f5e99d51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.690033] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 3cbfca3b-863a-40d1-81ab-63794b8de97e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.690033] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 5c77964f-e902-489a-86c3-9c9d4dd304d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.690033] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 96feb18e-14ee-40cf-bd5d-89a4e773c797 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.690272] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 30f91210-0318-4912-808b-843c2cd04ea1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.718203] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 843278e1-7d76-4f50-8170-9e335d29326e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 944.729381] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 4a1f7297-67b0-4c57-8c11-101877c27e48 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 944.740013] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 0182f659-5d01-4d6f-8242-aaec4efae151 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 944.749641] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 00412418-9727-4ed0-b4ff-92981ddab7ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 944.759064] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6a333c84-7e40-4ba0-b60b-a5972720e306 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 944.768717] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 5f04238c-701e-4ea3-9dde-769ec26a4462 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 944.778498] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 22f73210-0a16-4aa6-bc1d-d6625a6e4243 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 944.787801] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 09674d48-8f73-40f7-8ff3-3d4198a053fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 944.798293] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance db3de804-63b7-4887-b752-282e70e0f20e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 944.807464] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance ef201b80-65b2-4fa1-8150-8b7a3fbea673 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 944.818138] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 2546fa27-1d27-4e23-94f0-e6bdb4b42179 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 944.829441] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 1dc453f0-0983-428f-a186-f61248fe74c3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 944.838293] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 0371af11-3654-4009-8e87-dc2c516133be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 944.848437] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 13a015ac-f68b-421c-a397-c3f7d71531fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 944.858266] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance de8e3930-5fdc-49ca-8bbb-46b34dc32e8f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 944.867100] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance fe41695e-0aee-45a6-b66b-a7185e45fc4a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 944.879639] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8c919afe-37b6-47f0-b939-d9df5800d7ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 944.890565] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 92f1e0cd-924f-42a4-a91b-6e9dcede20fc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 944.901408] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 437b21d5-932d-4216-b7f7-17c6eab2665f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 944.901609] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 944.901817] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 945.228157] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a656ffba-0042-4c87-929f-e10ee01c3116 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.236328] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c55b595-b160-4434-8b0f-ba5eda083753 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.266264] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2021f92-1c6c-40a7-9b87-4d555db5d205 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.273840] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b1058d6-2fb7-40d4-8ec2-d7ae648d0d1d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.287735] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 
82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.303450] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 945.315321] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 945.315513] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.701s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.288474] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 953.265770] env[67015]: WARNING oslo_vmware.rw_handles [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 953.265770] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 953.265770] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 953.265770] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 953.265770] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 953.265770] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 953.265770] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 953.265770] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 953.265770] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 953.265770] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 953.265770] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 953.265770] env[67015]: ERROR oslo_vmware.rw_handles [ 953.266621] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Downloaded image file data 
8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/99481127-6390-459a-8287-bfd1b1bd0685/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 953.268047] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 953.268307] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Copying Virtual Disk [datastore2] vmware_temp/99481127-6390-459a-8287-bfd1b1bd0685/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/99481127-6390-459a-8287-bfd1b1bd0685/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 953.268590] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-423ca197-1e4a-4d46-8734-6de66904b074 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.278321] env[67015]: DEBUG oslo_vmware.api [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Waiting for the task: (returnval){ [ 953.278321] env[67015]: value = "task-3114448" [ 953.278321] env[67015]: _type = "Task" [ 953.278321] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.287300] env[67015]: DEBUG oslo_vmware.api [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Task: {'id': task-3114448, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.788575] env[67015]: DEBUG oslo_vmware.exceptions [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Fault InvalidArgument not matched. 
{{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 953.788860] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 953.789408] env[67015]: ERROR nova.compute.manager [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 953.789408] env[67015]: Faults: ['InvalidArgument'] [ 953.789408] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Traceback (most recent call last): [ 953.789408] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 953.789408] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] yield resources [ 953.789408] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 953.789408] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] self.driver.spawn(context, instance, image_meta, [ 953.789408] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 953.789408] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 953.789408] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 953.789408] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] self._fetch_image_if_missing(context, vi) [ 953.789408] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 953.789818] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] image_cache(vi, tmp_image_ds_loc) [ 953.789818] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 953.789818] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] vm_util.copy_virtual_disk( [ 953.789818] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 953.789818] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] session._wait_for_task(vmdk_copy_task) [ 953.789818] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 953.789818] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] return self.wait_for_task(task_ref) [ 953.789818] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 953.789818] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] return evt.wait() [ 953.789818] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 953.789818] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] result = hub.switch() [ 953.789818] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 953.789818] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] return self.greenlet.switch() [ 953.790427] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 953.790427] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] self.f(*self.args, **self.kw) [ 953.790427] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 953.790427] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] raise exceptions.translate_fault(task_info.error) [ 953.790427] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 953.790427] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Faults: ['InvalidArgument'] [ 953.790427] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] [ 953.790427] env[67015]: INFO nova.compute.manager [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Terminating instance [ 953.791790] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.791790] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 953.791790] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a5d1370-93aa-4cae-8727-db568dfee4fd {{(pid=67015) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.793959] env[67015]: DEBUG nova.compute.manager [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 953.794155] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 953.794862] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7813d32-e844-4169-8ba9-738c9e093444 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.801940] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 953.802217] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-28d02b2c-b89c-43cd-8a49-7703b58a4125 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.804265] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 953.804437] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 953.805426] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e64cca1-d0ae-42c1-ae31-43039604ebc2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.810414] env[67015]: DEBUG oslo_vmware.api [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Waiting for the task: (returnval){ [ 953.810414] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]5243bcf6-88f2-49f6-3743-4b7b23d05aea" [ 953.810414] env[67015]: _type = "Task" [ 953.810414] env[67015]: } to complete. 
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.817278] env[67015]: DEBUG oslo_vmware.api [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]5243bcf6-88f2-49f6-3743-4b7b23d05aea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.877036] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 953.877246] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 953.877418] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Deleting the datastore file [datastore2] 13e28171-8074-4660-91cf-f6d569414fc6 {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 953.877671] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ed082f3d-52f4-4a65-9a31-830eb321e454 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.883875] env[67015]: DEBUG oslo_vmware.api [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Waiting for the task: (returnval){ [ 953.883875] env[67015]: value = "task-3114450" [ 953.883875] env[67015]: _type = "Task" [ 953.883875] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.891341] env[67015]: DEBUG oslo_vmware.api [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Task: {'id': task-3114450, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.321108] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 954.321417] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Creating directory with path [datastore2] vmware_temp/4d237136-e578-43db-a912-61669c744cb9/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 954.321615] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-43606322-0527-43dd-a864-4416d9b54802 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.332820] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Created directory with path [datastore2] vmware_temp/4d237136-e578-43db-a912-61669c744cb9/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 954.333039] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Fetch image to [datastore2] vmware_temp/4d237136-e578-43db-a912-61669c744cb9/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 954.333221] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/4d237136-e578-43db-a912-61669c744cb9/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 954.334038] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36d1d77-d115-4a1b-866b-c21f0d896cba {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.340732] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da935de-f06e-45a4-a9ed-4b66a71e2119 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.349580] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e073dee7-023a-4852-b692-6a5002cd1f90 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.378995] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da5d6b05-5423-4eb5-983e-8c432216e09d {{(pid=67015) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.386852] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-99a4f6f6-0197-4b35-bf06-53c9eabcff0b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.393715] env[67015]: DEBUG oslo_vmware.api [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Task: {'id': task-3114450, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07608} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.393942] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 954.394157] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 954.394330] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 954.394502] env[67015]: INFO nova.compute.manager [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Took 0.60 seconds to destroy the instance on the hypervisor. 
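Annotation: the spawn failure above is the standard oslo.vmware task-polling path. CopyVirtualDisk_Task is started, _poll_task reports progress, and once the task finishes in an error state the vCenter fault is translated and raised (the traceback shows exceptions.translate_fault in api.py line 448 producing the VimFaultException with Faults: ['InvalidArgument']). Below is a minimal stdlib-only sketch of that poll-and-translate loop; VimFaultError, wait_for_task and the fake task states are illustrative stand-ins, not the oslo.vmware API.

    import time

    class VimFaultError(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, msg, fault_list):
            super().__init__(msg)
            self.fault_list = fault_list

    def wait_for_task(fetch_task_info, interval=0.5):
        """Poll a task-info callable until the task succeeds or errors."""
        while True:
            info = fetch_task_info()
            if info['state'] == 'running':
                # cf. the "progress is 0%" lines from _poll_task above
                print(f"progress is {info.get('progress', 0)}%")
            elif info['state'] == 'success':
                return info.get('result')
            elif info['state'] == 'error':
                # cf. "raise exceptions.translate_fault(task_info.error)"
                raise VimFaultError(info['error_msg'], info['faults'])
            time.sleep(interval)

    # Simulate a CopyVirtualDisk_Task failing the way the log shows.
    states = iter([
        {'state': 'running', 'progress': 0},
        {'state': 'error',
         'error_msg': 'A specified parameter was not correct: fileType',
         'faults': ['InvalidArgument']},
    ])
    try:
        wait_for_task(lambda: next(states), interval=0)
    except VimFaultError as exc:
        print(exc, exc.fault_list)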
[ 954.396549] env[67015]: DEBUG nova.compute.claims [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 954.396719] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.396928] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.411903] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 954.476188] env[67015]: DEBUG oslo_vmware.rw_handles [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4d237136-e578-43db-a912-61669c744cb9/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 954.538637] env[67015]: DEBUG oslo_vmware.rw_handles [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 954.539520] env[67015]: DEBUG oslo_vmware.rw_handles [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4d237136-e578-43db-a912-61669c744cb9/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 954.851734] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59d65aff-20b0-4892-b1bc-1a77920927b3 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.859412] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6ef58d-ec8e-4198-89e6-4c1aa7f1cc05 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.888853] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125b8119-0b38-4bac-9237-476524ea683d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.896174] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0cd1cc-bbb4-4544-86a9-8ec330234b04 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.909513] env[67015]: DEBUG nova.compute.provider_tree [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 954.918866] env[67015]: DEBUG nova.scheduler.client.report [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 954.937278] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.540s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.937812] env[67015]: ERROR nova.compute.manager [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 954.937812] env[67015]: Faults: ['InvalidArgument'] [ 954.937812] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Traceback (most recent call last): [ 954.937812] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 954.937812] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] self.driver.spawn(context, instance, image_meta, [ 954.937812] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 954.937812] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 954.937812] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 954.937812] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] self._fetch_image_if_missing(context, vi) [ 954.937812] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 954.937812] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] image_cache(vi, tmp_image_ds_loc) [ 954.937812] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 954.938262] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] vm_util.copy_virtual_disk( [ 954.938262] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 954.938262] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] session._wait_for_task(vmdk_copy_task) [ 954.938262] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 954.938262] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] return self.wait_for_task(task_ref) [ 954.938262] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 954.938262] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] return evt.wait() [ 954.938262] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 954.938262] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] result = hub.switch() [ 954.938262] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 954.938262] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] return self.greenlet.switch() [ 954.938262] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 954.938262] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] self.f(*self.args, **self.kw) [ 954.938691] env[67015]: ERROR nova.compute.manager [instance: 
13e28171-8074-4660-91cf-f6d569414fc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 954.938691] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] raise exceptions.translate_fault(task_info.error) [ 954.938691] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 954.938691] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Faults: ['InvalidArgument'] [ 954.938691] env[67015]: ERROR nova.compute.manager [instance: 13e28171-8074-4660-91cf-f6d569414fc6] [ 954.938691] env[67015]: DEBUG nova.compute.utils [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 954.940150] env[67015]: DEBUG nova.compute.manager [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Build of instance 13e28171-8074-4660-91cf-f6d569414fc6 was re-scheduled: A specified parameter was not correct: fileType [ 954.940150] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 954.940561] env[67015]: DEBUG nova.compute.manager [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 954.940749] env[67015]: DEBUG nova.compute.manager [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 954.940905] env[67015]: DEBUG nova.compute.manager [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 954.941082] env[67015]: DEBUG nova.network.neutron [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 955.322489] env[67015]: DEBUG nova.network.neutron [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.339025] env[67015]: INFO nova.compute.manager [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Took 0.40 seconds to deallocate network for instance. [ 955.427143] env[67015]: INFO nova.scheduler.client.report [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Deleted allocations for instance 13e28171-8074-4660-91cf-f6d569414fc6 [ 955.454283] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c08c9574-1136-4d51-9751-9ad93d3d5a24 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Lock "13e28171-8074-4660-91cf-f6d569414fc6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 388.574s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.456061] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "13e28171-8074-4660-91cf-f6d569414fc6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 384.646s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.456061] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] During sync_power_state the instance has a pending task (spawning). Skip.
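Annotation: the wait/held figures above and below (build lock held 388.574s, a power-state sync that waited 384.646s, a terminate request that waited 187.849s) are oslo.concurrency's bookkeeping around per-instance critical sections: everything serializes on the instance UUID, so the terminate could only proceed once the failed build released the lock. A rough stdlib-only sketch of that accounting, assuming plain threading.Lock semantics; timed_lock and _locks are hypothetical names, not the lockutils internals.

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}                      # lock name -> threading.Lock
    _guard = threading.Lock()        # protects the registry itself

    @contextmanager
    def timed_lock(name, by):
        """Acquire a named lock, logging wait and hold times like lockutils."""
        with _guard:
            lk = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lk.acquire()
        print(f'Lock "{name}" acquired by "{by}" :: '
              f'waited {time.monotonic() - t0:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            lk.release()
            print(f'Lock "{name}" "released" by "{by}" :: '
                  f'held {time.monotonic() - t1:.3f}s')

    with timed_lock("compute_resources", "example.critical_section"):
        time.sleep(0.01)             # stand-in for the guarded work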
[ 955.456194] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "13e28171-8074-4660-91cf-f6d569414fc6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.456865] env[67015]: DEBUG oslo_concurrency.lockutils [None req-70a69cce-9e5e-4bba-a606-0a5fa685fe06 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Lock "13e28171-8074-4660-91cf-f6d569414fc6" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 187.849s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.457094] env[67015]: DEBUG oslo_concurrency.lockutils [None req-70a69cce-9e5e-4bba-a606-0a5fa685fe06 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Acquiring lock "13e28171-8074-4660-91cf-f6d569414fc6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.457302] env[67015]: DEBUG oslo_concurrency.lockutils [None req-70a69cce-9e5e-4bba-a606-0a5fa685fe06 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Lock "13e28171-8074-4660-91cf-f6d569414fc6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.457630] env[67015]: DEBUG oslo_concurrency.lockutils [None req-70a69cce-9e5e-4bba-a606-0a5fa685fe06 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Lock "13e28171-8074-4660-91cf-f6d569414fc6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.459498] env[67015]: INFO nova.compute.manager [None req-70a69cce-9e5e-4bba-a606-0a5fa685fe06 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Terminating instance [ 955.463008] env[67015]: DEBUG nova.compute.manager [None req-70a69cce-9e5e-4bba-a606-0a5fa685fe06 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 955.463244] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-70a69cce-9e5e-4bba-a606-0a5fa685fe06 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 955.463739] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-72199840-3f0c-4dc1-828f-2265b72f5e45 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.473876] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99218cff-b447-47ba-bc5f-28f3361aeaa4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.484907] env[67015]: DEBUG nova.compute.manager [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 955.505720] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-70a69cce-9e5e-4bba-a606-0a5fa685fe06 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 13e28171-8074-4660-91cf-f6d569414fc6 could not be found. [ 955.505930] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-70a69cce-9e5e-4bba-a606-0a5fa685fe06 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 955.506126] env[67015]: INFO nova.compute.manager [None req-70a69cce-9e5e-4bba-a606-0a5fa685fe06 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 955.506521] env[67015]: DEBUG oslo.service.loopingcall [None req-70a69cce-9e5e-4bba-a606-0a5fa685fe06 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 955.506662] env[67015]: DEBUG nova.compute.manager [-] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 955.506933] env[67015]: DEBUG nova.network.neutron [-] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 955.537630] env[67015]: DEBUG nova.network.neutron [-] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.546143] env[67015]: DEBUG oslo_concurrency.lockutils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.546428] env[67015]: DEBUG oslo_concurrency.lockutils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.548280] env[67015]: INFO nova.compute.claims [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 955.552034] env[67015]: INFO nova.compute.manager [-] [instance: 13e28171-8074-4660-91cf-f6d569414fc6] Took 0.05 seconds to deallocate network for instance. 
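Annotation: the inventory blob the report client keeps re-confirming fixes the schedulable capacity of this node. Placement treats usable capacity per resource class as (total - reserved) * allocation_ratio, which here gives 192 schedulable VCPUs (so 10 allocated against 48 physical is far from contention), 196078 MB of RAM and 400 GB of disk, with max_unit capping any single allocation at 16 VCPUs, 65530 MB and 135 GB. A quick check of that arithmetic, using the values logged above:

    # Capacity math behind the repeated "Inventory has not changed" entries.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        # Placement's usable capacity: (total - reserved) * allocation_ratio
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: capacity={capacity:g}")
    # VCPU: capacity=192, MEMORY_MB: capacity=196078, DISK_GB: capacity=400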
[ 955.670496] env[67015]: DEBUG oslo_concurrency.lockutils [None req-70a69cce-9e5e-4bba-a606-0a5fa685fe06 tempest-ServersAdminNegativeTestJSON-1480454963 tempest-ServersAdminNegativeTestJSON-1480454963-project-member] Lock "13e28171-8074-4660-91cf-f6d569414fc6" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.213s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.961990] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f80a26d-5987-4f8e-a5ea-808238763f41 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.969607] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ef4da6-10c9-467d-b590-7628e3d8b164 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.001038] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd0144e-8206-44b6-9a54-30a978d072b3 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.008241] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a3cbd17-9ce4-4d2b-86f8-fc79a6536693 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.021395] env[67015]: DEBUG nova.compute.provider_tree [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 956.029554] env[67015]: DEBUG nova.scheduler.client.report [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 956.044875] env[67015]: DEBUG oslo_concurrency.lockutils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.498s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.045403] env[67015]: DEBUG nova.compute.manager [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Start building networks asynchronously for instance.
{{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 956.082115] env[67015]: DEBUG nova.compute.utils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 956.083386] env[67015]: DEBUG nova.compute.manager [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 956.083559] env[67015]: DEBUG nova.network.neutron [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 956.092097] env[67015]: DEBUG nova.compute.manager [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 956.159071] env[67015]: DEBUG nova.compute.manager [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Start spawning the instance on the hypervisor. 
{{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 956.184141] env[67015]: DEBUG nova.virt.hardware [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 956.184401] env[67015]: DEBUG nova.virt.hardware [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 956.184564] env[67015]: DEBUG nova.virt.hardware [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 956.184748] env[67015]: DEBUG nova.virt.hardware [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 956.184897] env[67015]: DEBUG nova.virt.hardware [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 956.185058] env[67015]: DEBUG nova.virt.hardware [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 956.185293] env[67015]: DEBUG nova.virt.hardware [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 956.185470] env[67015]: DEBUG nova.virt.hardware [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 956.185642] env[67015]: DEBUG nova.virt.hardware [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 956.185806] env[67015]: DEBUG nova.virt.hardware [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 956.185979] env[67015]: DEBUG nova.virt.hardware [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 956.186845] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2561bbb5-34ae-4601-8b32-e84cadcaca50 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.194664] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5316415a-70a8-4c02-ac23-5212ab4fdadd {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.375093] env[67015]: DEBUG nova.policy [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ee91e5d0eda43f99d5ec38bcbd27cd1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '983a54fba03f4120a02f1f9596c31898', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 956.958712] env[67015]: DEBUG nova.network.neutron [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Successfully created port: 0aacf2fa-aa19-4100-b241-db77571fb584 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 958.092402] env[67015]: DEBUG nova.network.neutron [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Successfully updated port: 0aacf2fa-aa19-4100-b241-db77571fb584 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 958.110351] env[67015]: DEBUG oslo_concurrency.lockutils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Acquiring lock "refresh_cache-843278e1-7d76-4f50-8170-9e335d29326e" {{(pid=67015) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.110351] env[67015]: DEBUG oslo_concurrency.lockutils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Acquired lock "refresh_cache-843278e1-7d76-4f50-8170-9e335d29326e" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.110351] env[67015]: DEBUG nova.network.neutron [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 958.196328] env[67015]: DEBUG nova.network.neutron [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 958.226076] env[67015]: DEBUG nova.compute.manager [req-66f46ccc-cbf9-4547-b669-94bc6d741c5f req-e81de98e-b3a9-448b-a0cc-865d76b9a2ac service nova] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Received event network-vif-plugged-0aacf2fa-aa19-4100-b241-db77571fb584 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 958.226381] env[67015]: DEBUG oslo_concurrency.lockutils [req-66f46ccc-cbf9-4547-b669-94bc6d741c5f req-e81de98e-b3a9-448b-a0cc-865d76b9a2ac service nova] Acquiring lock "843278e1-7d76-4f50-8170-9e335d29326e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.228019] env[67015]: DEBUG oslo_concurrency.lockutils [req-66f46ccc-cbf9-4547-b669-94bc6d741c5f req-e81de98e-b3a9-448b-a0cc-865d76b9a2ac service nova] Lock "843278e1-7d76-4f50-8170-9e335d29326e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.228019] env[67015]: DEBUG oslo_concurrency.lockutils [req-66f46ccc-cbf9-4547-b669-94bc6d741c5f req-e81de98e-b3a9-448b-a0cc-865d76b9a2ac service nova] Lock "843278e1-7d76-4f50-8170-9e335d29326e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.228019] env[67015]: DEBUG nova.compute.manager [req-66f46ccc-cbf9-4547-b669-94bc6d741c5f req-e81de98e-b3a9-448b-a0cc-865d76b9a2ac service nova] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] No waiting events found dispatching network-vif-plugged-0aacf2fa-aa19-4100-b241-db77571fb584 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 958.228019] env[67015]: WARNING nova.compute.manager [req-66f46ccc-cbf9-4547-b669-94bc6d741c5f req-e81de98e-b3a9-448b-a0cc-865d76b9a2ac service nova] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Received unexpected event network-vif-plugged-0aacf2fa-aa19-4100-b241-db77571fb584 for instance with vm_state building and task_state spawning. 
[ 958.706309] env[67015]: DEBUG nova.network.neutron [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Updating instance_info_cache with network_info: [{"id": "0aacf2fa-aa19-4100-b241-db77571fb584", "address": "fa:16:3e:81:3b:83", "network": {"id": "5643b949-4187-4632-949e-11c04e0fa098", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1720289988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "983a54fba03f4120a02f1f9596c31898", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b107fab-ee71-47db-ad4d-3c6f05546843", "external-id": "cl2-zone-554", "segmentation_id": 554, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0aacf2fa-aa", "ovs_interfaceid": "0aacf2fa-aa19-4100-b241-db77571fb584", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.724986] env[67015]: DEBUG oslo_concurrency.lockutils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Releasing lock "refresh_cache-843278e1-7d76-4f50-8170-9e335d29326e" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.725413] env[67015]: DEBUG nova.compute.manager [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Instance network_info: |[{"id": "0aacf2fa-aa19-4100-b241-db77571fb584", "address": "fa:16:3e:81:3b:83", "network": {"id": "5643b949-4187-4632-949e-11c04e0fa098", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1720289988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "983a54fba03f4120a02f1f9596c31898", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b107fab-ee71-47db-ad4d-3c6f05546843", "external-id": "cl2-zone-554", "segmentation_id": 554, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0aacf2fa-aa", "ovs_interfaceid": "0aacf2fa-aa19-4100-b241-db77571fb584", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 958.725893] env[67015]: DEBUG 
nova.virt.vmwareapi.vmops [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:3b:83', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b107fab-ee71-47db-ad4d-3c6f05546843', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0aacf2fa-aa19-4100-b241-db77571fb584', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 958.733315] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Creating folder: Project (983a54fba03f4120a02f1f9596c31898). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 958.735508] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a08e79b6-9a4f-4d03-9c22-eef4bd601582 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.749814] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Created folder: Project (983a54fba03f4120a02f1f9596c31898) in parent group-v623108. [ 958.750032] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Creating folder: Instances. Parent ref: group-v623168. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 958.750276] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-43759187-9ed0-4292-952d-18fd1c5208a4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.762593] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Created folder: Instances in parent group-v623168. [ 958.763475] env[67015]: DEBUG oslo.service.loopingcall [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 958.763475] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 958.763475] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-49429fcc-3f6f-4527-b440-46f00fd95bae {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.787542] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 958.787542] env[67015]: value = "task-3114453" [ 958.787542] env[67015]: _type = "Task" [ 958.787542] env[67015]: } to complete. 
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.796799] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114453, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.297488] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114453, 'name': CreateVM_Task, 'duration_secs': 0.303126} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.297817] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 959.298975] env[67015]: DEBUG oslo_concurrency.lockutils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.298975] env[67015]: DEBUG oslo_concurrency.lockutils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.298975] env[67015]: DEBUG oslo_concurrency.lockutils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 959.299097] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-281874a7-af66-4f89-93e1-d604b59ba3a4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.303703] env[67015]: DEBUG oslo_vmware.api [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Waiting for the task: (returnval){ [ 959.303703] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]5211f9a6-1141-020a-c0ee-678d92781cad" [ 959.303703] env[67015]: _type = "Task" [ 959.303703] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.311463] env[67015]: DEBUG oslo_vmware.api [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]5211f9a6-1141-020a-c0ee-678d92781cad, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.816712] env[67015]: DEBUG oslo_concurrency.lockutils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.816987] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 959.817213] env[67015]: DEBUG oslo_concurrency.lockutils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.597202] env[67015]: DEBUG nova.compute.manager [req-7b2ef624-faa4-4003-a036-738c65b9348e req-949b7179-9c75-41a0-a8b6-884766610092 service nova] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Received event network-changed-0aacf2fa-aa19-4100-b241-db77571fb584 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 960.597202] env[67015]: DEBUG nova.compute.manager [req-7b2ef624-faa4-4003-a036-738c65b9348e req-949b7179-9c75-41a0-a8b6-884766610092 service nova] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Refreshing instance network info cache due to event network-changed-0aacf2fa-aa19-4100-b241-db77571fb584. {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 960.597202] env[67015]: DEBUG oslo_concurrency.lockutils [req-7b2ef624-faa4-4003-a036-738c65b9348e req-949b7179-9c75-41a0-a8b6-884766610092 service nova] Acquiring lock "refresh_cache-843278e1-7d76-4f50-8170-9e335d29326e" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.597202] env[67015]: DEBUG oslo_concurrency.lockutils [req-7b2ef624-faa4-4003-a036-738c65b9348e req-949b7179-9c75-41a0-a8b6-884766610092 service nova] Acquired lock "refresh_cache-843278e1-7d76-4f50-8170-9e335d29326e" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.598033] env[67015]: DEBUG nova.network.neutron [req-7b2ef624-faa4-4003-a036-738c65b9348e req-949b7179-9c75-41a0-a8b6-884766610092 service nova] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Refreshing network info cache for port 0aacf2fa-aa19-4100-b241-db77571fb584 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 961.546954] env[67015]: DEBUG nova.network.neutron [req-7b2ef624-faa4-4003-a036-738c65b9348e req-949b7179-9c75-41a0-a8b6-884766610092 service nova] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Updated VIF entry in instance network info cache for port 0aacf2fa-aa19-4100-b241-db77571fb584. 
{{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 961.547323] env[67015]: DEBUG nova.network.neutron [req-7b2ef624-faa4-4003-a036-738c65b9348e req-949b7179-9c75-41a0-a8b6-884766610092 service nova] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Updating instance_info_cache with network_info: [{"id": "0aacf2fa-aa19-4100-b241-db77571fb584", "address": "fa:16:3e:81:3b:83", "network": {"id": "5643b949-4187-4632-949e-11c04e0fa098", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1720289988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "983a54fba03f4120a02f1f9596c31898", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b107fab-ee71-47db-ad4d-3c6f05546843", "external-id": "cl2-zone-554", "segmentation_id": 554, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0aacf2fa-aa", "ovs_interfaceid": "0aacf2fa-aa19-4100-b241-db77571fb584", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.560194] env[67015]: DEBUG oslo_concurrency.lockutils [req-7b2ef624-faa4-4003-a036-738c65b9348e req-949b7179-9c75-41a0-a8b6-884766610092 service nova] Releasing lock "refresh_cache-843278e1-7d76-4f50-8170-9e335d29326e" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 963.240312] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Acquiring lock "132f4b05-aaf0-4e58-9fcf-2490d4aed2e9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.240652] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Lock "132f4b05-aaf0-4e58-9fcf-2490d4aed2e9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.899190] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6fc1e8b9-212f-4cd8-90c1-aa84c166ad2c tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquiring lock "2d427736-0a1d-4963-9380-6c8d47b39e1c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.900340] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6fc1e8b9-212f-4cd8-90c1-aa84c166ad2c tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock 
"2d427736-0a1d-4963-9380-6c8d47b39e1c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.484926] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d74084d1-ae71-47a7-8f70-63952ca0ea3f tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Acquiring lock "74d737f1-8d72-411f-a0f7-a4483ae6804f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.485181] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d74084d1-ae71-47a7-8f70-63952ca0ea3f tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Lock "74d737f1-8d72-411f-a0f7-a4483ae6804f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.677953] env[67015]: DEBUG oslo_concurrency.lockutils [None req-81c6f8a7-e967-42f1-bf29-0ff6e96b0a28 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Acquiring lock "843278e1-7d76-4f50-8170-9e335d29326e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.723988] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f68fa909-1400-43ff-aa2a-60d6b49f5d70 tempest-InstanceActionsNegativeTestJSON-7259337 tempest-InstanceActionsNegativeTestJSON-7259337-project-member] Acquiring lock "45e9996a-dee3-4cf3-85ed-c972f27dd7b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.725413] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f68fa909-1400-43ff-aa2a-60d6b49f5d70 tempest-InstanceActionsNegativeTestJSON-7259337 tempest-InstanceActionsNegativeTestJSON-7259337-project-member] Lock "45e9996a-dee3-4cf3-85ed-c972f27dd7b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.105028] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5c07553-15e3-4471-b4d4-e10502c6a75a tempest-ServersAdminTestJSON-1171030578 tempest-ServersAdminTestJSON-1171030578-project-member] Acquiring lock "3b4a01e3-9653-45ba-9bc5-f37443fe0f40" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.105327] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5c07553-15e3-4471-b4d4-e10502c6a75a tempest-ServersAdminTestJSON-1171030578 tempest-ServersAdminTestJSON-1171030578-project-member] Lock "3b4a01e3-9653-45ba-9bc5-f37443fe0f40" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.644789] env[67015]: DEBUG oslo_concurrency.lockutils [None req-efe5f90f-e0dd-4a42-b586-f7fd89e387ab tempest-ServersAdminTestJSON-1171030578 tempest-ServersAdminTestJSON-1171030578-project-member] Acquiring lock "b3812d61-be7e-4c30-b59a-1eb59d987954" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.645038] env[67015]: DEBUG oslo_concurrency.lockutils [None req-efe5f90f-e0dd-4a42-b586-f7fd89e387ab tempest-ServersAdminTestJSON-1171030578 tempest-ServersAdminTestJSON-1171030578-project-member] Lock "b3812d61-be7e-4c30-b59a-1eb59d987954" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.804675] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f1b4cb23-b497-4f35-b58e-211e36500b99 tempest-ImagesOneServerNegativeTestJSON-971646581 tempest-ImagesOneServerNegativeTestJSON-971646581-project-member] Acquiring lock "7658ef83-ea52-41b2-b636-7f4fc7d9deea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.804955] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f1b4cb23-b497-4f35-b58e-211e36500b99 tempest-ImagesOneServerNegativeTestJSON-971646581 tempest-ImagesOneServerNegativeTestJSON-971646581-project-member] Lock "7658ef83-ea52-41b2-b636-7f4fc7d9deea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.645014] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e0d57ef6-1d44-4420-851e-df272a254979 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Acquiring lock "bdbaaad2-4343-4864-ba52-108b2bff51f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.645624] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e0d57ef6-1d44-4420-851e-df272a254979 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Lock "bdbaaad2-4343-4864-ba52-108b2bff51f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.713308] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc05981c-ced1-4edd-bc43-df9933b064d7 tempest-ServersNegativeTestMultiTenantJSON-331051477 tempest-ServersNegativeTestMultiTenantJSON-331051477-project-member] Acquiring lock "cf721d5b-0a1d-4fa0-a985-eae683e7309f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.713601] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc05981c-ced1-4edd-bc43-df9933b064d7 
tempest-ServersNegativeTestMultiTenantJSON-331051477 tempest-ServersNegativeTestMultiTenantJSON-331051477-project-member] Lock "cf721d5b-0a1d-4fa0-a985-eae683e7309f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.515217] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1000.946032] env[67015]: WARNING oslo_vmware.rw_handles [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1000.946032] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1000.946032] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1000.946032] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1000.946032] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1000.946032] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 1000.946032] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1000.946032] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1000.946032] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1000.946032] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1000.946032] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1000.946032] env[67015]: ERROR oslo_vmware.rw_handles [ 1000.946395] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/4d237136-e578-43db-a912-61669c744cb9/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1000.948696] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1000.948993] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Copying Virtual Disk [datastore2] vmware_temp/4d237136-e578-43db-a912-61669c744cb9/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] 
vmware_temp/4d237136-e578-43db-a912-61669c744cb9/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1000.949334] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d2c35762-da1b-4ef3-9f9a-427c58034caa {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.957487] env[67015]: DEBUG oslo_vmware.api [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Waiting for the task: (returnval){ [ 1000.957487] env[67015]: value = "task-3114454" [ 1000.957487] env[67015]: _type = "Task" [ 1000.957487] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.965702] env[67015]: DEBUG oslo_vmware.api [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Task: {'id': task-3114454, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.467627] env[67015]: DEBUG oslo_vmware.exceptions [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Fault InvalidArgument not matched. {{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1001.468008] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.468564] env[67015]: ERROR nova.compute.manager [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1001.468564] env[67015]: Faults: ['InvalidArgument'] [ 1001.468564] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Traceback (most recent call last): [ 1001.468564] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1001.468564] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] yield resources [ 1001.468564] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1001.468564] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] self.driver.spawn(context, instance, image_meta, [ 1001.468564] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1001.468564] env[67015]: ERROR nova.compute.manager [instance: 
98ad3e5c-065d-4561-890c-46d5ca0a8f7f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1001.468564] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1001.468564] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] self._fetch_image_if_missing(context, vi) [ 1001.468564] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1001.468881] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] image_cache(vi, tmp_image_ds_loc) [ 1001.468881] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1001.468881] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] vm_util.copy_virtual_disk( [ 1001.468881] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1001.468881] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] session._wait_for_task(vmdk_copy_task) [ 1001.468881] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1001.468881] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] return self.wait_for_task(task_ref) [ 1001.468881] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1001.468881] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] return evt.wait() [ 1001.468881] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1001.468881] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] result = hub.switch() [ 1001.468881] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1001.468881] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] return self.greenlet.switch() [ 1001.469225] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1001.469225] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] self.f(*self.args, **self.kw) [ 1001.469225] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1001.469225] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] raise exceptions.translate_fault(task_info.error) [ 1001.469225] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1001.469225] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Faults: ['InvalidArgument'] [ 1001.469225] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] [ 1001.469225] env[67015]: INFO nova.compute.manager [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Terminating instance [ 1001.470481] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.470686] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1001.471313] env[67015]: DEBUG nova.compute.manager [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1001.471696] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1001.471755] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e04b5f03-b3e2-449b-a6e6-46bbf1a3c35f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.474219] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a99432a-b0e4-4eeb-8f7f-f852eef6c5a3 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.480698] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1001.480908] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-63ec121a-ec0c-47ca-9b09-8828cee55e0a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.483121] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
1001.483301] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1001.484247] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2495490-146e-488f-b822-6ea8a1920628 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.489033] env[67015]: DEBUG oslo_vmware.api [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Waiting for the task: (returnval){ [ 1001.489033] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52b65c4f-b410-d6cb-2543-2eee626e6bf9" [ 1001.489033] env[67015]: _type = "Task" [ 1001.489033] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.496728] env[67015]: DEBUG oslo_vmware.api [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52b65c4f-b410-d6cb-2543-2eee626e6bf9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.509318] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1001.548885] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1001.549250] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1001.549335] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Deleting the datastore file [datastore2] 98ad3e5c-065d-4561-890c-46d5ca0a8f7f {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1001.549587] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-341d67f6-3eea-4db9-bf1c-920bce773e61 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.556693] env[67015]: DEBUG oslo_vmware.api [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Waiting for the task: (returnval){ [ 1001.556693] env[67015]: value = "task-3114456" [ 1001.556693] env[67015]: _type = "Task" [ 1001.556693] 
env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.565708] env[67015]: DEBUG oslo_vmware.api [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Task: {'id': task-3114456, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.000945] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1002.001295] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Creating directory with path [datastore2] vmware_temp/2b213559-0a7c-4069-b78d-2c24d87c3ab3/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1002.001555] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a3874bb8-3009-40a5-98ad-32bc83400f42 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.017025] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Created directory with path [datastore2] vmware_temp/2b213559-0a7c-4069-b78d-2c24d87c3ab3/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1002.017025] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Fetch image to [datastore2] vmware_temp/2b213559-0a7c-4069-b78d-2c24d87c3ab3/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1002.017025] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/2b213559-0a7c-4069-b78d-2c24d87c3ab3/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1002.017025] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab210d41-c6be-4d4d-a556-93a7aa980968 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.024243] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b0625e-9d9a-4df5-a34d-efd39c5e53d0 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.037304] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-81e56e17-ca70-46e3-89c1-cfb6ee0e627f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.073306] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a321734f-b90d-4258-a8ce-65dcf3935973 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.081833] env[67015]: DEBUG oslo_vmware.api [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Task: {'id': task-3114456, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081061} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.083538] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1002.083822] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1002.087021] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1002.087021] env[67015]: INFO nova.compute.manager [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1002.087021] env[67015]: DEBUG nova.compute.claims [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1002.087021] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.087317] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.089964] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ac865202-4e6b-461b-9693-3f59b7c8e490 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.113964] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1002.201715] env[67015]: DEBUG oslo_vmware.rw_handles [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2b213559-0a7c-4069-b78d-2c24d87c3ab3/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1002.271638] env[67015]: DEBUG oslo_vmware.rw_handles [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1002.271747] env[67015]: DEBUG oslo_vmware.rw_handles [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2b213559-0a7c-4069-b78d-2c24d87c3ab3/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1002.678339] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2764c84e-9466-4a8c-a36e-5b79394bb574 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.685922] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97a7eae5-c15b-40d4-90c4-294a53afa0e8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.716545] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c13ef05-31a5-40ef-8117-001dc6054366 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.724671] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c01ffd-922c-4ce0-af3f-effc94c48c2a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.743113] env[67015]: DEBUG nova.compute.provider_tree [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1002.751514] env[67015]: DEBUG nova.scheduler.client.report [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1002.768291] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.681s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.768822] env[67015]: ERROR nova.compute.manager [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1002.768822] env[67015]: Faults: ['InvalidArgument'] [ 1002.768822] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Traceback (most recent call last): [ 1002.768822] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1002.768822] env[67015]: ERROR nova.compute.manager 
[instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] self.driver.spawn(context, instance, image_meta, [ 1002.768822] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1002.768822] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1002.768822] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1002.768822] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] self._fetch_image_if_missing(context, vi) [ 1002.768822] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1002.768822] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] image_cache(vi, tmp_image_ds_loc) [ 1002.768822] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1002.769144] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] vm_util.copy_virtual_disk( [ 1002.769144] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1002.769144] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] session._wait_for_task(vmdk_copy_task) [ 1002.769144] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1002.769144] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] return self.wait_for_task(task_ref) [ 1002.769144] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1002.769144] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] return evt.wait() [ 1002.769144] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1002.769144] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] result = hub.switch() [ 1002.769144] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1002.769144] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] return self.greenlet.switch() [ 1002.769144] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1002.769144] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] self.f(*self.args, **self.kw) [ 1002.769484] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1002.769484] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] raise exceptions.translate_fault(task_info.error) [ 1002.769484] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1002.769484] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Faults: ['InvalidArgument'] [ 1002.769484] env[67015]: ERROR nova.compute.manager [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] [ 1002.769594] env[67015]: DEBUG nova.compute.utils [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1002.770927] env[67015]: DEBUG nova.compute.manager [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Build of instance 98ad3e5c-065d-4561-890c-46d5ca0a8f7f was re-scheduled: A specified parameter was not correct: fileType [ 1002.770927] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1002.771500] env[67015]: DEBUG nova.compute.manager [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1002.771652] env[67015]: DEBUG nova.compute.manager [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1002.771819] env[67015]: DEBUG nova.compute.manager [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1002.771983] env[67015]: DEBUG nova.network.neutron [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1003.344157] env[67015]: DEBUG nova.network.neutron [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.358798] env[67015]: INFO nova.compute.manager [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Took 0.59 seconds to deallocate network for instance. [ 1003.490561] env[67015]: INFO nova.scheduler.client.report [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Deleted allocations for instance 98ad3e5c-065d-4561-890c-46d5ca0a8f7f [ 1003.517212] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1003.517620] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5c31448-b624-4689-a403-b234d4a58e72 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Lock "98ad3e5c-065d-4561-890c-46d5ca0a8f7f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 435.345s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.517811] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1003.519184] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "98ad3e5c-065d-4561-890c-46d5ca0a8f7f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 432.709s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.519184] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] During sync_power_state the instance has a pending task (spawning). Skip. 
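The "Acquiring lock" / "acquired ... waited" / "released ... held" triplets throughout this log come from oslo.concurrency's lock helpers (the inner wrapper at lockutils.py:402/407/421). A minimal usage sketch, assuming the decorator form; the lock name matches the log, while the function name and body are placeholders:

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_tracked_resources():
    # Body runs with the "compute_resources" semaphore held; the decorator's
    # inner() wrapper emits the Acquiring / acquired (waited Ns) /
    # released (held Ns) DEBUG lines seen above, e.g. the 0.681s hold.
    pass

Note that the per-instance lock "98ad3e5c-..." was held for the full 435.345s failed build, which is why the terminate request just below reports waiting 235.631s before it could proceed.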
[ 1003.519474] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "98ad3e5c-065d-4561-890c-46d5ca0a8f7f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.520305] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9bc756d2-cb21-4f35-98f7-dc8a8c9e1e53 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Lock "98ad3e5c-065d-4561-890c-46d5ca0a8f7f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 235.631s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.521306] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9bc756d2-cb21-4f35-98f7-dc8a8c9e1e53 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Acquiring lock "98ad3e5c-065d-4561-890c-46d5ca0a8f7f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.521516] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9bc756d2-cb21-4f35-98f7-dc8a8c9e1e53 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Lock "98ad3e5c-065d-4561-890c-46d5ca0a8f7f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.521679] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9bc756d2-cb21-4f35-98f7-dc8a8c9e1e53 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Lock "98ad3e5c-065d-4561-890c-46d5ca0a8f7f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.523552] env[67015]: INFO nova.compute.manager [None req-9bc756d2-cb21-4f35-98f7-dc8a8c9e1e53 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Terminating instance [ 1003.525390] env[67015]: DEBUG nova.compute.manager [None req-9bc756d2-cb21-4f35-98f7-dc8a8c9e1e53 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Start destroying the instance on the hypervisor. 
{{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1003.525609] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc756d2-cb21-4f35-98f7-dc8a8c9e1e53 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1003.525910] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3a76b2ef-3f0f-4d9d-94c7-afb117bbebee {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.535585] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d3aeeaf-2fcb-4563-8d61-d71d71862a11 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.546784] env[67015]: DEBUG nova.compute.manager [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 4a1f7297-67b0-4c57-8c11-101877c27e48] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1003.568379] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-9bc756d2-cb21-4f35-98f7-dc8a8c9e1e53 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 98ad3e5c-065d-4561-890c-46d5ca0a8f7f could not be found. [ 1003.568379] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc756d2-cb21-4f35-98f7-dc8a8c9e1e53 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1003.568669] env[67015]: INFO nova.compute.manager [None req-9bc756d2-cb21-4f35-98f7-dc8a8c9e1e53 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1003.568857] env[67015]: DEBUG oslo.service.loopingcall [None req-9bc756d2-cb21-4f35-98f7-dc8a8c9e1e53 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1003.569155] env[67015]: DEBUG nova.compute.manager [-] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1003.569259] env[67015]: DEBUG nova.network.neutron [-] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1003.580733] env[67015]: DEBUG nova.compute.manager [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 4a1f7297-67b0-4c57-8c11-101877c27e48] Instance disappeared before build. 
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1003.623265] env[67015]: DEBUG oslo_concurrency.lockutils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Lock "4a1f7297-67b0-4c57-8c11-101877c27e48" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 229.786s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.627975] env[67015]: DEBUG nova.network.neutron [-] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.638429] env[67015]: INFO nova.compute.manager [-] [instance: 98ad3e5c-065d-4561-890c-46d5ca0a8f7f] Took 0.07 seconds to deallocate network for instance. [ 1003.641225] env[67015]: DEBUG nova.compute.manager [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 0182f659-5d01-4d6f-8242-aaec4efae151] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1003.676964] env[67015]: DEBUG nova.compute.manager [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 0182f659-5d01-4d6f-8242-aaec4efae151] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1003.686838] env[67015]: DEBUG oslo_concurrency.lockutils [None req-bb000d50-e079-4e0b-a9dd-54be9706a9bf tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "2100b556-11e5-4846-ab1d-5eff53343ef4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.687190] env[67015]: DEBUG oslo_concurrency.lockutils [None req-bb000d50-e079-4e0b-a9dd-54be9706a9bf tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "2100b556-11e5-4846-ab1d-5eff53343ef4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.703459] env[67015]: DEBUG oslo_concurrency.lockutils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Lock "0182f659-5d01-4d6f-8242-aaec4efae151" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 229.836s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.714373] env[67015]: DEBUG nova.compute.manager [None req-e3c847e3-32b3-47f2-a757-389778c000d8 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 00412418-9727-4ed0-b4ff-92981ddab7ce] Starting instance... 
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1003.744552] env[67015]: DEBUG nova.compute.manager [None req-e3c847e3-32b3-47f2-a757-389778c000d8 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 00412418-9727-4ed0-b4ff-92981ddab7ce] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1003.755663] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9bc756d2-cb21-4f35-98f7-dc8a8c9e1e53 tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Lock "98ad3e5c-065d-4561-890c-46d5ca0a8f7f" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.236s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.771216] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e3c847e3-32b3-47f2-a757-389778c000d8 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Lock "00412418-9727-4ed0-b4ff-92981ddab7ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 212.094s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.781806] env[67015]: DEBUG nova.compute.manager [None req-6235af37-c6d1-45f7-94a7-3fe3089b0460 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: b43ae574-1083-42fd-b9aa-d1bf6a2bad2a] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1003.806217] env[67015]: DEBUG nova.compute.manager [None req-6235af37-c6d1-45f7-94a7-3fe3089b0460 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: b43ae574-1083-42fd-b9aa-d1bf6a2bad2a] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1003.830334] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6235af37-c6d1-45f7-94a7-3fe3089b0460 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "b43ae574-1083-42fd-b9aa-d1bf6a2bad2a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 204.295s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.839043] env[67015]: DEBUG nova.compute.manager [None req-7044fae6-3465-48b3-aace-86f22cb9eb3d tempest-ServersAaction247Test-1091897607 tempest-ServersAaction247Test-1091897607-project-member] [instance: 6a333c84-7e40-4ba0-b60b-a5972720e306] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1003.863817] env[67015]: DEBUG nova.compute.manager [None req-7044fae6-3465-48b3-aace-86f22cb9eb3d tempest-ServersAaction247Test-1091897607 tempest-ServersAaction247Test-1091897607-project-member] [instance: 6a333c84-7e40-4ba0-b60b-a5972720e306] Instance disappeared before build. 
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1003.889317] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7044fae6-3465-48b3-aace-86f22cb9eb3d tempest-ServersAaction247Test-1091897607 tempest-ServersAaction247Test-1091897607-project-member] Lock "6a333c84-7e40-4ba0-b60b-a5972720e306" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 202.957s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.900188] env[67015]: DEBUG nova.compute.manager [None req-4848b5f0-a200-415b-8bbe-1975fb586126 tempest-VolumesAdminNegativeTest-1180113282 tempest-VolumesAdminNegativeTest-1180113282-project-member] [instance: 5f04238c-701e-4ea3-9dde-769ec26a4462] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1003.934634] env[67015]: DEBUG nova.compute.manager [None req-4848b5f0-a200-415b-8bbe-1975fb586126 tempest-VolumesAdminNegativeTest-1180113282 tempest-VolumesAdminNegativeTest-1180113282-project-member] [instance: 5f04238c-701e-4ea3-9dde-769ec26a4462] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1003.964271] env[67015]: DEBUG oslo_concurrency.lockutils [None req-4848b5f0-a200-415b-8bbe-1975fb586126 tempest-VolumesAdminNegativeTest-1180113282 tempest-VolumesAdminNegativeTest-1180113282-project-member] Lock "5f04238c-701e-4ea3-9dde-769ec26a4462" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 201.307s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.997692] env[67015]: DEBUG nova.compute.manager [None req-07ab390e-63fb-425c-91cb-773279d29100 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 22f73210-0a16-4aa6-bc1d-d6625a6e4243] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1004.024495] env[67015]: DEBUG nova.compute.manager [None req-07ab390e-63fb-425c-91cb-773279d29100 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 22f73210-0a16-4aa6-bc1d-d6625a6e4243] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1004.048844] env[67015]: DEBUG oslo_concurrency.lockutils [None req-07ab390e-63fb-425c-91cb-773279d29100 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "22f73210-0a16-4aa6-bc1d-d6625a6e4243" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 198.726s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.059731] env[67015]: DEBUG nova.compute.manager [None req-e5000e44-c6be-469c-a6ea-71f26af01d78 tempest-ServerTagsTestJSON-316550150 tempest-ServerTagsTestJSON-316550150-project-member] [instance: 09674d48-8f73-40f7-8ff3-3d4198a053fb] Starting instance... 
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1004.096599] env[67015]: DEBUG nova.compute.manager [None req-e5000e44-c6be-469c-a6ea-71f26af01d78 tempest-ServerTagsTestJSON-316550150 tempest-ServerTagsTestJSON-316550150-project-member] [instance: 09674d48-8f73-40f7-8ff3-3d4198a053fb] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1004.126078] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e5000e44-c6be-469c-a6ea-71f26af01d78 tempest-ServerTagsTestJSON-316550150 tempest-ServerTagsTestJSON-316550150-project-member] Lock "09674d48-8f73-40f7-8ff3-3d4198a053fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 197.054s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.138017] env[67015]: DEBUG nova.compute.manager [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1004.198805] env[67015]: DEBUG oslo_concurrency.lockutils [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.198805] env[67015]: DEBUG oslo_concurrency.lockutils [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1004.200438] env[67015]: INFO nova.compute.claims [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1004.689240] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf8fc90-007f-4e02-9dbf-96d6e47c6ce1 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.698070] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b603b580-5bb5-42e5-88ec-db3665f8db67 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.729139] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c923bd8-af30-4d46-bbe1-680887641882 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.736718] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eeee83f-671d-476c-8043-8bbfc46ca776 {{(pid=67015) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.750826] env[67015]: DEBUG nova.compute.provider_tree [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1004.760740] env[67015]: DEBUG nova.scheduler.client.report [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1004.776810] env[67015]: DEBUG oslo_concurrency.lockutils [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.578s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.777344] env[67015]: DEBUG nova.compute.manager [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1004.833480] env[67015]: DEBUG nova.compute.utils [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1004.839440] env[67015]: DEBUG nova.compute.manager [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1004.839626] env[67015]: DEBUG nova.network.neutron [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1004.851244] env[67015]: DEBUG nova.compute.manager [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Start building block device mappings for instance. 
{{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1004.929489] env[67015]: DEBUG nova.compute.manager [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Start spawning the instance on the hypervisor. {{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1004.956898] env[67015]: DEBUG nova.virt.hardware [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1004.957173] env[67015]: DEBUG nova.virt.hardware [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1004.957337] env[67015]: DEBUG nova.virt.hardware [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1004.957523] env[67015]: DEBUG nova.virt.hardware [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1004.957691] env[67015]: DEBUG nova.virt.hardware [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1004.957867] env[67015]: DEBUG nova.virt.hardware [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1004.958097] env[67015]: DEBUG nova.virt.hardware [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1004.958265] env[67015]: DEBUG nova.virt.hardware [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1004.958434] env[67015]: DEBUG nova.virt.hardware [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1004.958599] env[67015]: DEBUG nova.virt.hardware [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1004.958776] env[67015]: DEBUG nova.virt.hardware [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1004.959636] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a014bb2-0b4d-46a6-a38f-f55f0f918e60 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.967844] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b6dcea-d3ee-4b8d-9a0c-73b8debf26ca {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.991909] env[67015]: DEBUG nova.policy [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34f757c06b0f458997e6c2018e881da2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '07d7d6f728324bbbb9219d84d5436391', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 1005.511850] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1005.542588] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1005.542807] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None 
None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1006.159097] env[67015]: DEBUG nova.network.neutron [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Successfully created port: 0aa4cf27-d000-4743-aaa2-9c9d9937d016 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1006.515325] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1006.516247] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1006.516247] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1006.539636] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1006.540479] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1006.540684] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1006.540829] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1006.540961] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1006.541102] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1006.541233] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Skipping network cache update for instance because it is Building. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1006.541352] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1006.541470] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1006.541588] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1006.541707] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1006.542682] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1006.542885] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1006.543299] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1006.557399] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.557631] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.557806] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.558022] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 
1006.559917] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16e84c31-f939-4369-8a74-e4a3177e3095 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.572323] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a66b4ddb-43e7-49fd-89ac-cc97b945c8c6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.589740] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e3394a0-bc5e-4ac0-a553-643fe847026a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.598939] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61246aaf-9e55-49bf-9416-da92e72e698a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.636018] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181017MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1006.636401] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.636710] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.750290] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 781b688b-ec99-4423-99b2-2502c6e8a75d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1006.750523] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 0734f630-dea5-4ee0-b890-dd50f3e8b178 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1006.751522] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1006.751522] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 66fa7689-aea7-4b88-b63c-0754f5e99d51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1006.751522] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 3cbfca3b-863a-40d1-81ab-63794b8de97e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1006.751522] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 5c77964f-e902-489a-86c3-9c9d4dd304d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1006.751705] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 96feb18e-14ee-40cf-bd5d-89a4e773c797 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1006.751705] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 30f91210-0318-4912-808b-843c2cd04ea1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1006.751705] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 843278e1-7d76-4f50-8170-9e335d29326e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1006.751705] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance db3de804-63b7-4887-b752-282e70e0f20e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1006.766821] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 2546fa27-1d27-4e23-94f0-e6bdb4b42179 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1006.780427] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 1dc453f0-0983-428f-a186-f61248fe74c3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1006.794240] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 0371af11-3654-4009-8e87-dc2c516133be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1006.810279] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 13a015ac-f68b-421c-a397-c3f7d71531fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1006.825330] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance de8e3930-5fdc-49ca-8bbb-46b34dc32e8f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1006.839022] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance fe41695e-0aee-45a6-b66b-a7185e45fc4a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1006.847376] env[67015]: DEBUG oslo_concurrency.lockutils [None req-943d3af3-3135-4a6a-b373-df400fd103f8 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Acquiring lock "db3de804-63b7-4887-b752-282e70e0f20e" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.852511] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8c919afe-37b6-47f0-b939-d9df5800d7ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1006.868793] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 92f1e0cd-924f-42a4-a91b-6e9dcede20fc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1006.880215] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 437b21d5-932d-4216-b7f7-17c6eab2665f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1006.893904] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1006.908120] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 2d427736-0a1d-4963-9380-6c8d47b39e1c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1006.923417] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 74d737f1-8d72-411f-a0f7-a4483ae6804f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1006.940975] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 45e9996a-dee3-4cf3-85ed-c972f27dd7b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1006.957909] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 3b4a01e3-9653-45ba-9bc5-f37443fe0f40 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1006.975061] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance b3812d61-be7e-4c30-b59a-1eb59d987954 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1006.986346] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 7658ef83-ea52-41b2-b636-7f4fc7d9deea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1006.998996] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance bdbaaad2-4343-4864-ba52-108b2bff51f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.015715] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance cf721d5b-0a1d-4fa0-a985-eae683e7309f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.031999] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 2100b556-11e5-4846-ab1d-5eff53343ef4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.032335] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1007.032517] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1007.493919] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0962685-f397-49e7-8edc-ddd287d522d9 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.501938] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43126462-31de-4c5c-b639-eb72ed9bf813 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.542727] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f76dbeb6-3151-4be6-8654-284cd7a5ad36 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.550977] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-480c41b5-b188-4475-b6e5-6eadde9fa381 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.565801] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1007.577014] env[67015]: DEBUG nova.network.neutron [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Successfully updated port: 0aa4cf27-d000-4743-aaa2-9c9d9937d016 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1007.584571] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1007.603988] env[67015]: DEBUG nova.compute.manager [req-5de3470c-f74f-40d4-9eeb-dff3f5dbf285 req-03fe4ca3-8bb6-4b73-a599-1ad597baf608 service nova] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Received event network-vif-plugged-0aa4cf27-d000-4743-aaa2-9c9d9937d016 {{(pid=67015) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11131}} [ 1007.604222] env[67015]: DEBUG oslo_concurrency.lockutils [req-5de3470c-f74f-40d4-9eeb-dff3f5dbf285 req-03fe4ca3-8bb6-4b73-a599-1ad597baf608 service nova] Acquiring lock "db3de804-63b7-4887-b752-282e70e0f20e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.604425] env[67015]: DEBUG oslo_concurrency.lockutils [req-5de3470c-f74f-40d4-9eeb-dff3f5dbf285 req-03fe4ca3-8bb6-4b73-a599-1ad597baf608 service nova] Lock "db3de804-63b7-4887-b752-282e70e0f20e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.604600] env[67015]: DEBUG oslo_concurrency.lockutils [req-5de3470c-f74f-40d4-9eeb-dff3f5dbf285 req-03fe4ca3-8bb6-4b73-a599-1ad597baf608 service nova] Lock "db3de804-63b7-4887-b752-282e70e0f20e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.604766] env[67015]: DEBUG nova.compute.manager [req-5de3470c-f74f-40d4-9eeb-dff3f5dbf285 req-03fe4ca3-8bb6-4b73-a599-1ad597baf608 service nova] [instance: db3de804-63b7-4887-b752-282e70e0f20e] No waiting events found dispatching network-vif-plugged-0aa4cf27-d000-4743-aaa2-9c9d9937d016 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1007.604929] env[67015]: WARNING nova.compute.manager [req-5de3470c-f74f-40d4-9eeb-dff3f5dbf285 req-03fe4ca3-8bb6-4b73-a599-1ad597baf608 service nova] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Received unexpected event network-vif-plugged-0aa4cf27-d000-4743-aaa2-9c9d9937d016 for instance with vm_state building and task_state deleting. 
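
The resource-tracker records above show the periodic allocation audit at work: allocations in placement that belong to instances actively managed on this host are kept, allocations made by the scheduler for instances that have not started yet are skipped rather than healed, and (in the same routine, though not triggered in this run) allocations for instances unknown to the host would be removed. Below is a minimal, hypothetical sketch of that three-way decision; the function name, the plain-dict inputs, and the UUIDs are illustrative assumptions only — the real logic is _remove_deleted_instances_allocations in nova/compute/resource_tracker.py, which consults the database and the placement API rather than dicts.

# Illustrative sketch only: a simplified version of the decision the
# resource tracker logs above. Inputs are plain dicts standing in for
# the placement API and the instance table.

def reconcile_allocations(allocations, tracked_instances):
    """Decide what to do with each placement allocation on this node.

    allocations: {instance_uuid: {'resources': {...}}} from placement.
    tracked_instances: {instance_uuid: vm_state} known to this host.
    Returns {instance_uuid: 'keep' | 'skip_heal' | 'remove'}.
    """
    actions = {}
    for uuid in allocations:
        state = tracked_instances.get(uuid)
        if state is None:
            # Unknown to this host: stale allocation, candidate for removal.
            actions[uuid] = 'remove'
        elif state == 'building':
            # Scheduler allocated, instance has yet to start: skip heal.
            actions[uuid] = 'skip_heal'
        else:
            # Actively managed here with allocations in placement.
            actions[uuid] = 'keep'
    return actions

# Example with placeholder UUIDs mirroring the two cases logged above.
allocs = {
    'aaaaaaaa-0000-0000-0000-000000000001':
        {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}},
    'aaaaaaaa-0000-0000-0000-000000000002':
        {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}},
}
tracked = {
    'aaaaaaaa-0000-0000-0000-000000000001': 'active',
    'aaaaaaaa-0000-0000-0000-000000000002': 'building',
}
assert reconcile_allocations(allocs, tracked) == {
    'aaaaaaaa-0000-0000-0000-000000000001': 'keep',
    'aaaaaaaa-0000-0000-0000-000000000002': 'skip_heal',
}
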
[ 1007.606791] env[67015]: DEBUG oslo_concurrency.lockutils [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Acquiring lock "refresh_cache-db3de804-63b7-4887-b752-282e70e0f20e" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1007.606919] env[67015]: DEBUG oslo_concurrency.lockutils [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Acquired lock "refresh_cache-db3de804-63b7-4887-b752-282e70e0f20e" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.607082] env[67015]: DEBUG nova.network.neutron [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1007.658055] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1007.658318] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.022s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.686707] env[67015]: DEBUG nova.network.neutron [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Instance cache missing network info. 
{{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1008.228965] env[67015]: DEBUG nova.network.neutron [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Updating instance_info_cache with network_info: [{"id": "0aa4cf27-d000-4743-aaa2-9c9d9937d016", "address": "fa:16:3e:66:9e:e5", "network": {"id": "d517022a-41cd-456f-b65b-183d807f10ee", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1282322875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07d7d6f728324bbbb9219d84d5436391", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0aa4cf27-d0", "ovs_interfaceid": "0aa4cf27-d000-4743-aaa2-9c9d9937d016", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.253090] env[67015]: DEBUG oslo_concurrency.lockutils [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Releasing lock "refresh_cache-db3de804-63b7-4887-b752-282e70e0f20e" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1008.253424] env[67015]: DEBUG nova.compute.manager [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Instance network_info: |[{"id": "0aa4cf27-d000-4743-aaa2-9c9d9937d016", "address": "fa:16:3e:66:9e:e5", "network": {"id": "d517022a-41cd-456f-b65b-183d807f10ee", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1282322875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07d7d6f728324bbbb9219d84d5436391", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0aa4cf27-d0", "ovs_interfaceid": "0aa4cf27-d000-4743-aaa2-9c9d9937d016", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1008.253843] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:9e:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea45c024-d603-4bac-9c1b-f302437ea4fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0aa4cf27-d000-4743-aaa2-9c9d9937d016', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1008.264025] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Creating folder: Project (07d7d6f728324bbbb9219d84d5436391). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1008.264025] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a4fbd01-c88e-4d79-877f-9ca67bbb0173 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.273786] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Created folder: Project (07d7d6f728324bbbb9219d84d5436391) in parent group-v623108. [ 1008.273974] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Creating folder: Instances. Parent ref: group-v623171. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1008.274216] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b2e03f62-b94e-4295-865c-839517a5389b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.284591] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Created folder: Instances in parent group-v623171. [ 1008.284591] env[67015]: DEBUG oslo.service.loopingcall [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1008.284591] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1008.284591] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d0388fbd-faf8-4ee7-8f59-3f8386900313 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.305708] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1008.305708] env[67015]: value = "task-3114459" [ 1008.305708] env[67015]: _type = "Task" [ 1008.305708] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.314168] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114459, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.819630] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114459, 'name': CreateVM_Task, 'duration_secs': 0.32239} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.819897] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1008.820672] env[67015]: DEBUG oslo_concurrency.lockutils [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.820842] env[67015]: DEBUG oslo_concurrency.lockutils [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.822236] env[67015]: DEBUG oslo_concurrency.lockutils [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1008.822498] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-578297ec-71e9-47b3-aebf-789106a95a88 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.830322] env[67015]: DEBUG oslo_vmware.api [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Waiting for the task: (returnval){ [ 1008.830322] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52c27068-84bd-dab9-d614-fa4e285608ba" [ 1008.830322] env[67015]: _type = "Task" [ 1008.830322] 
env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.840680] env[67015]: DEBUG oslo_vmware.api [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52c27068-84bd-dab9-d614-fa4e285608ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.352172] env[67015]: DEBUG oslo_concurrency.lockutils [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1009.352540] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1009.352770] env[67015]: DEBUG oslo_concurrency.lockutils [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.632801] env[67015]: DEBUG nova.compute.manager [req-35a06db4-bb3f-43de-8684-8350f58da40b req-ca4fdcd8-a219-4930-95ef-e5ac2168b208 service nova] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Received event network-changed-0aa4cf27-d000-4743-aaa2-9c9d9937d016 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1009.632999] env[67015]: DEBUG nova.compute.manager [req-35a06db4-bb3f-43de-8684-8350f58da40b req-ca4fdcd8-a219-4930-95ef-e5ac2168b208 service nova] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Refreshing instance network info cache due to event network-changed-0aa4cf27-d000-4743-aaa2-9c9d9937d016. 
{{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1009.633255] env[67015]: DEBUG oslo_concurrency.lockutils [req-35a06db4-bb3f-43de-8684-8350f58da40b req-ca4fdcd8-a219-4930-95ef-e5ac2168b208 service nova] Acquiring lock "refresh_cache-db3de804-63b7-4887-b752-282e70e0f20e" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.633401] env[67015]: DEBUG oslo_concurrency.lockutils [req-35a06db4-bb3f-43de-8684-8350f58da40b req-ca4fdcd8-a219-4930-95ef-e5ac2168b208 service nova] Acquired lock "refresh_cache-db3de804-63b7-4887-b752-282e70e0f20e" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.633561] env[67015]: DEBUG nova.network.neutron [req-35a06db4-bb3f-43de-8684-8350f58da40b req-ca4fdcd8-a219-4930-95ef-e5ac2168b208 service nova] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Refreshing network info cache for port 0aa4cf27-d000-4743-aaa2-9c9d9937d016 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1010.571767] env[67015]: DEBUG nova.network.neutron [req-35a06db4-bb3f-43de-8684-8350f58da40b req-ca4fdcd8-a219-4930-95ef-e5ac2168b208 service nova] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Updated VIF entry in instance network info cache for port 0aa4cf27-d000-4743-aaa2-9c9d9937d016. {{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1010.572141] env[67015]: DEBUG nova.network.neutron [req-35a06db4-bb3f-43de-8684-8350f58da40b req-ca4fdcd8-a219-4930-95ef-e5ac2168b208 service nova] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Updating instance_info_cache with network_info: [{"id": "0aa4cf27-d000-4743-aaa2-9c9d9937d016", "address": "fa:16:3e:66:9e:e5", "network": {"id": "d517022a-41cd-456f-b65b-183d807f10ee", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1282322875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07d7d6f728324bbbb9219d84d5436391", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0aa4cf27-d0", "ovs_interfaceid": "0aa4cf27-d000-4743-aaa2-9c9d9937d016", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.584088] env[67015]: DEBUG oslo_concurrency.lockutils [req-35a06db4-bb3f-43de-8684-8350f58da40b req-ca4fdcd8-a219-4930-95ef-e5ac2168b208 service nova] Releasing lock "refresh_cache-db3de804-63b7-4887-b752-282e70e0f20e" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1013.222178] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 
tempest-ServerActionsTestOtherB-1508818736-project-member] Acquiring lock "7b744243-c7e5-4253-9273-9d7f84772d96" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.222485] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Lock "7b744243-c7e5-4253-9273-9d7f84772d96" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.437209] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Acquiring lock "98f18180-bd1c-492d-9fbe-4bf306aca4b2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.437507] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Lock "98f18180-bd1c-492d-9fbe-4bf306aca4b2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.223702] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c93f7f52-e61a-4337-a511-b62748cef194 tempest-ServerActionsTestOtherA-1420094941 tempest-ServerActionsTestOtherA-1420094941-project-member] Acquiring lock "3c14fe11-5172-4611-acf8-c29746a5658e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.223986] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c93f7f52-e61a-4337-a511-b62748cef194 tempest-ServerActionsTestOtherA-1420094941 tempest-ServerActionsTestOtherA-1420094941-project-member] Lock "3c14fe11-5172-4611-acf8-c29746a5658e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.599359] env[67015]: DEBUG oslo_concurrency.lockutils [None req-fec408af-71d4-4b95-994d-e15a1b6e2fbb tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Acquiring lock "6d6a0e44-f9b4-4da4-948b-b05b86c93a3d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.599626] env[67015]: DEBUG oslo_concurrency.lockutils [None req-fec408af-71d4-4b95-994d-e15a1b6e2fbb tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Lock "6d6a0e44-f9b4-4da4-948b-b05b86c93a3d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
{{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.739081] env[67015]: DEBUG oslo_concurrency.lockutils [None req-73e9b7b3-7756-402e-b1da-57f4cfa8e817 tempest-ListServerFiltersTestJSON-1317128283 tempest-ListServerFiltersTestJSON-1317128283-project-member] Acquiring lock "13d49c4b-bc0e-4e9e-aecb-59fd2745e9c5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.739081] env[67015]: DEBUG oslo_concurrency.lockutils [None req-73e9b7b3-7756-402e-b1da-57f4cfa8e817 tempest-ListServerFiltersTestJSON-1317128283 tempest-ListServerFiltersTestJSON-1317128283-project-member] Lock "13d49c4b-bc0e-4e9e-aecb-59fd2745e9c5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.401466] env[67015]: DEBUG oslo_concurrency.lockutils [None req-16e504d0-81d5-46ba-b76c-e81f03db9c1b tempest-ListServerFiltersTestJSON-1317128283 tempest-ListServerFiltersTestJSON-1317128283-project-member] Acquiring lock "3fda5b6d-44b1-412a-9eff-0e8be3c725c1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.401776] env[67015]: DEBUG oslo_concurrency.lockutils [None req-16e504d0-81d5-46ba-b76c-e81f03db9c1b tempest-ListServerFiltersTestJSON-1317128283 tempest-ListServerFiltersTestJSON-1317128283-project-member] Lock "3fda5b6d-44b1-412a-9eff-0e8be3c725c1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.555151] env[67015]: DEBUG oslo_concurrency.lockutils [None req-cd6196ed-5ab7-4b98-8652-b2ed5d1cd70e tempest-ListImageFiltersTestJSON-1257660884 tempest-ListImageFiltersTestJSON-1257660884-project-member] Acquiring lock "2698cd39-5b23-48b8-ae60-48f7576c1546" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.555389] env[67015]: DEBUG oslo_concurrency.lockutils [None req-cd6196ed-5ab7-4b98-8652-b2ed5d1cd70e tempest-ListImageFiltersTestJSON-1257660884 tempest-ListImageFiltersTestJSON-1257660884-project-member] Lock "2698cd39-5b23-48b8-ae60-48f7576c1546" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.189637] env[67015]: DEBUG oslo_concurrency.lockutils [None req-30872710-9e00-43a6-9de8-e712d8c5923b tempest-ListServerFiltersTestJSON-1317128283 tempest-ListServerFiltersTestJSON-1317128283-project-member] Acquiring lock "2b1b82aa-9f54-4829-98bf-011f6289a534" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.189920] env[67015]: DEBUG oslo_concurrency.lockutils [None 
req-30872710-9e00-43a6-9de8-e712d8c5923b tempest-ListServerFiltersTestJSON-1317128283 tempest-ListServerFiltersTestJSON-1317128283-project-member] Lock "2b1b82aa-9f54-4829-98bf-011f6289a534" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.192077] env[67015]: DEBUG oslo_concurrency.lockutils [None req-05235453-ab08-4e7a-806a-954b35f68b31 tempest-ListImageFiltersTestJSON-1257660884 tempest-ListImageFiltersTestJSON-1257660884-project-member] Acquiring lock "46a76bbb-28ff-4b71-aa4e-f946ef586b64" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.192287] env[67015]: DEBUG oslo_concurrency.lockutils [None req-05235453-ab08-4e7a-806a-954b35f68b31 tempest-ListImageFiltersTestJSON-1257660884 tempest-ListImageFiltersTestJSON-1257660884-project-member] Lock "46a76bbb-28ff-4b71-aa4e-f946ef586b64" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.349531] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f0d2db5d-6d0f-4fc0-bf25-ecddd6101337 tempest-AttachVolumeNegativeTest-173917919 tempest-AttachVolumeNegativeTest-173917919-project-member] Acquiring lock "a51672b6-f918-4ba3-9c55-af2edb3ec693" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.349800] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f0d2db5d-6d0f-4fc0-bf25-ecddd6101337 tempest-AttachVolumeNegativeTest-173917919 tempest-AttachVolumeNegativeTest-173917919-project-member] Lock "a51672b6-f918-4ba3-9c55-af2edb3ec693" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1050.796521] env[67015]: WARNING oslo_vmware.rw_handles [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1050.796521] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1050.796521] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1050.796521] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1050.796521] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1050.796521] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 1050.796521] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1050.796521] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1050.796521] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1050.796521] env[67015]: ERROR 
oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1050.796521] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1050.796521] env[67015]: ERROR oslo_vmware.rw_handles [ 1050.797140] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/2b213559-0a7c-4069-b78d-2c24d87c3ab3/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1050.799018] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1050.799292] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Copying Virtual Disk [datastore2] vmware_temp/2b213559-0a7c-4069-b78d-2c24d87c3ab3/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/2b213559-0a7c-4069-b78d-2c24d87c3ab3/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1050.799594] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ae8d234b-2dfa-4665-b4ec-346301f2c8e3 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.808974] env[67015]: DEBUG oslo_vmware.api [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Waiting for the task: (returnval){ [ 1050.808974] env[67015]: value = "task-3114460" [ 1050.808974] env[67015]: _type = "Task" [ 1050.808974] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.817539] env[67015]: DEBUG oslo_vmware.api [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Task: {'id': task-3114460, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.318504] env[67015]: DEBUG oslo_vmware.exceptions [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Fault InvalidArgument not matched. 
{{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1051.318791] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1051.319357] env[67015]: ERROR nova.compute.manager [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1051.319357] env[67015]: Faults: ['InvalidArgument'] [ 1051.319357] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Traceback (most recent call last): [ 1051.319357] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1051.319357] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] yield resources [ 1051.319357] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1051.319357] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] self.driver.spawn(context, instance, image_meta, [ 1051.319357] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1051.319357] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1051.319357] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1051.319357] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] self._fetch_image_if_missing(context, vi) [ 1051.319357] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1051.319357] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] image_cache(vi, tmp_image_ds_loc) [ 1051.319747] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1051.319747] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] vm_util.copy_virtual_disk( [ 1051.319747] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1051.319747] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] session._wait_for_task(vmdk_copy_task) [ 1051.319747] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task 
[ 1051.319747] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] return self.wait_for_task(task_ref) [ 1051.319747] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1051.319747] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] return evt.wait() [ 1051.319747] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1051.319747] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] result = hub.switch() [ 1051.319747] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1051.319747] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] return self.greenlet.switch() [ 1051.319747] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1051.320131] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] self.f(*self.args, **self.kw) [ 1051.320131] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1051.320131] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] raise exceptions.translate_fault(task_info.error) [ 1051.320131] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1051.320131] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Faults: ['InvalidArgument'] [ 1051.320131] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] [ 1051.320131] env[67015]: INFO nova.compute.manager [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Terminating instance [ 1051.321259] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.321467] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1051.322116] env[67015]: DEBUG nova.compute.manager [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Start destroying 
the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1051.322320] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1051.322541] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58f202d3-9271-4688-8353-991f48cc8499 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.324886] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-246d4062-34dc-42a0-9328-81cb16445903 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.331109] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1051.331314] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-069273ff-5826-4ef6-bca2-592526eb6d88 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.333316] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1051.333490] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1051.334414] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8147b255-3f61-40b3-a9c9-274225631755 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.339130] env[67015]: DEBUG oslo_vmware.api [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Waiting for the task: (returnval){ [ 1051.339130] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52fb0d3f-bf65-75f0-c8e6-ea8e21d301d0" [ 1051.339130] env[67015]: _type = "Task" [ 1051.339130] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.348474] env[67015]: DEBUG oslo_vmware.api [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52fb0d3f-bf65-75f0-c8e6-ea8e21d301d0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.397308] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1051.397516] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1051.397693] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Deleting the datastore file [datastore2] 781b688b-ec99-4423-99b2-2502c6e8a75d {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1051.397961] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-593a3564-0e04-4c64-9749-9633d9507b6d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.403686] env[67015]: DEBUG oslo_vmware.api [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Waiting for the task: (returnval){ [ 1051.403686] env[67015]: value = "task-3114462" [ 1051.403686] env[67015]: _type = "Task" [ 1051.403686] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.410853] env[67015]: DEBUG oslo_vmware.api [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Task: {'id': task-3114462, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.850604] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1051.850835] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Creating directory with path [datastore2] vmware_temp/3cd5f848-8a61-41e3-9586-b2a43700eaca/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1051.851108] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-47bf7f13-dfab-4065-b20f-e40f50210f70 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.862175] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Created directory with path [datastore2] vmware_temp/3cd5f848-8a61-41e3-9586-b2a43700eaca/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1051.862845] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Fetch image to [datastore2] vmware_temp/3cd5f848-8a61-41e3-9586-b2a43700eaca/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1051.862845] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/3cd5f848-8a61-41e3-9586-b2a43700eaca/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1051.863279] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-690a0fed-8d5a-4a0c-b6fb-06e5bb03dd8c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.871175] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-185c7f7b-516e-4de6-9b42-fff9ab40bf3f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.880061] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31205cfb-6f80-443d-ae01-e62d2395515f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.913248] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ca7d08e5-32e4-4406-94fa-a712bafaee01 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.920530] env[67015]: DEBUG oslo_vmware.api [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Task: {'id': task-3114462, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073888} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.921905] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1051.922122] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1051.922311] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1051.922439] env[67015]: INFO nova.compute.manager [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Took 0.60 seconds to destroy the instance on the hypervisor. 
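The DeleteDatastoreFile_Task above is driven by the wait-then-poll pattern that recurs throughout this log: wait_for_task registers interest in a Task managed object, and _poll_task reports progress ("progress is 0%") until the task reaches a terminal state, at which point it either returns the result (with duration_secs, as in the completion entry above) or raises a translated fault. The following is a minimal, self-contained sketch of that loop, not the oslo_vmware implementation; get_task_info, TaskFailed, and the fixed poll interval are illustrative assumptions.

import time

class TaskFailed(Exception):
    """Terminal 'error' state; oslo_vmware raises a translated VIM fault here."""

def wait_for_task(get_task_info, interval=0.5):
    """Poll a vSphere-style TaskInfo until it reaches 'success' or 'error'.

    get_task_info is a caller-supplied callable returning an object with
    .state ('queued' | 'running' | 'success' | 'error'), .progress, and
    .error -- mirroring the TaskInfo fields the log entries above print.
    """
    while True:
        info = get_task_info()
        if info.state == "success":
            return info  # e.g. the DeleteDatastoreFile_Task completing in ~0.07s above
        if info.state == "error":
            # Surfaces to the caller as an exception, like the
            # VimFaultException traceback later in this log.
            raise TaskFailed(info.error)
        # Still queued/running: report progress and poll again.
        print(f"progress is {info.progress or 0}%")
        time.sleep(interval)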
[ 1051.924309] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0b596f6a-713f-4e06-bec7-c69439dbbf4c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.927026] env[67015]: DEBUG nova.compute.claims [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1051.927026] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.927026] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.949187] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1052.003832] env[67015]: DEBUG oslo_vmware.rw_handles [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3cd5f848-8a61-41e3-9586-b2a43700eaca/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1052.068030] env[67015]: DEBUG oslo_vmware.rw_handles [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1052.068030] env[67015]: DEBUG oslo_vmware.rw_handles [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3cd5f848-8a61-41e3-9586-b2a43700eaca/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1052.390779] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e42cd2-d5dd-4297-ba16-f476261f874a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.397931] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd69f17-73f4-4ec8-864d-b418644b968b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.427543] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-822d302e-f68d-4b22-bfdd-f8c8f70d0fa0 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.434086] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f703e33b-d8a2-45a9-854b-edf16709fe24 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.446414] env[67015]: DEBUG nova.compute.provider_tree [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1052.456749] env[67015]: DEBUG nova.scheduler.client.report [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1052.470368] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.544s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1052.470858] env[67015]: ERROR nova.compute.manager [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1052.470858] env[67015]: Faults: ['InvalidArgument'] [ 1052.470858] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Traceback (most recent call last): [ 1052.470858] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1052.470858] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] 
self.driver.spawn(context, instance, image_meta, [ 1052.470858] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1052.470858] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1052.470858] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1052.470858] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] self._fetch_image_if_missing(context, vi) [ 1052.470858] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1052.470858] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] image_cache(vi, tmp_image_ds_loc) [ 1052.470858] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1052.471218] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] vm_util.copy_virtual_disk( [ 1052.471218] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1052.471218] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] session._wait_for_task(vmdk_copy_task) [ 1052.471218] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1052.471218] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] return self.wait_for_task(task_ref) [ 1052.471218] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1052.471218] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] return evt.wait() [ 1052.471218] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1052.471218] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] result = hub.switch() [ 1052.471218] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1052.471218] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] return self.greenlet.switch() [ 1052.471218] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1052.471218] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] self.f(*self.args, **self.kw) [ 1052.471597] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task 
[ 1052.471597] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] raise exceptions.translate_fault(task_info.error) [ 1052.471597] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1052.471597] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Faults: ['InvalidArgument'] [ 1052.471597] env[67015]: ERROR nova.compute.manager [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] [ 1052.471597] env[67015]: DEBUG nova.compute.utils [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1052.472856] env[67015]: DEBUG nova.compute.manager [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Build of instance 781b688b-ec99-4423-99b2-2502c6e8a75d was re-scheduled: A specified parameter was not correct: fileType [ 1052.472856] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1052.473265] env[67015]: DEBUG nova.compute.manager [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1052.473443] env[67015]: DEBUG nova.compute.manager [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1052.473613] env[67015]: DEBUG nova.compute.manager [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1052.473786] env[67015]: DEBUG nova.network.neutron [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1052.867210] env[67015]: DEBUG nova.network.neutron [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.883288] env[67015]: INFO nova.compute.manager [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Took 0.41 seconds to deallocate network for instance. 
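The "Acquiring lock / acquired ... waited / released ... held" triplets surrounding the claim abort above are emitted by oslo_concurrency's lockutils wrapper (the inner function cited in the entry trailers). A minimal sketch of the two usual entry points to that machinery follows; the function bodies are placeholders and the default in-process lock semantics are assumed, so this illustrates the pattern rather than reproducing Nova's code.

from oslo_concurrency import lockutils

# Decorator form: the body runs under the named lock, and the wrapper logs
# acquire/waited/released/held pairs like the "compute_resources" ones above.
@lockutils.synchronized('compute_resources')
def abort_instance_claim():
    pass  # resource-tracker bookkeeping would go here

# Context-manager form, equivalent for ad-hoc critical sections such as the
# per-instance-UUID locks taken around terminate/sync_power_state above.
def do_terminate_instance(instance_uuid):
    with lockutils.lock(instance_uuid):
        pass  # serialized per-instance work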
[ 1052.987230] env[67015]: INFO nova.scheduler.client.report [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Deleted allocations for instance 781b688b-ec99-4423-99b2-2502c6e8a75d [ 1053.006901] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f558b86e-6ffa-4ce5-8ac4-a942b0f9af24 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "781b688b-ec99-4423-99b2-2502c6e8a75d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 484.795s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.010147] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "781b688b-ec99-4423-99b2-2502c6e8a75d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 482.199s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.010147] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] During sync_power_state the instance has a pending task (spawning). Skip. [ 1053.010147] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "781b688b-ec99-4423-99b2-2502c6e8a75d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.010147] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6a036bcc-32cc-46ad-8a2e-3a42b3ee4e6e tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "781b688b-ec99-4423-99b2-2502c6e8a75d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 285.477s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.010332] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6a036bcc-32cc-46ad-8a2e-3a42b3ee4e6e tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquiring lock "781b688b-ec99-4423-99b2-2502c6e8a75d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1053.010515] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6a036bcc-32cc-46ad-8a2e-3a42b3ee4e6e tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "781b688b-ec99-4423-99b2-2502c6e8a75d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.010662] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6a036bcc-32cc-46ad-8a2e-3a42b3ee4e6e tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "781b688b-ec99-4423-99b2-2502c6e8a75d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1053.012497] env[67015]: INFO nova.compute.manager [None req-6a036bcc-32cc-46ad-8a2e-3a42b3ee4e6e tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Terminating instance [ 1053.014193] env[67015]: DEBUG nova.compute.manager [None req-6a036bcc-32cc-46ad-8a2e-3a42b3ee4e6e tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1053.014391] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-6a036bcc-32cc-46ad-8a2e-3a42b3ee4e6e tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1053.014648] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9eac85d7-6375-422d-825d-5903df7c7ffc {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.023642] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8597e568-59cc-4fd2-a847-3be958020196 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.037399] env[67015]: DEBUG nova.compute.manager [None req-fbe08c8e-af44-4d57-8db2-2343af481a69 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: ef201b80-65b2-4fa1-8150-8b7a3fbea673] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1053.056972] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-6a036bcc-32cc-46ad-8a2e-3a42b3ee4e6e tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 781b688b-ec99-4423-99b2-2502c6e8a75d could not be found. [ 1053.057214] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-6a036bcc-32cc-46ad-8a2e-3a42b3ee4e6e tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1053.057410] env[67015]: INFO nova.compute.manager [None req-6a036bcc-32cc-46ad-8a2e-3a42b3ee4e6e tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1053.057671] env[67015]: DEBUG oslo.service.loopingcall [None req-6a036bcc-32cc-46ad-8a2e-3a42b3ee4e6e tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1053.057911] env[67015]: DEBUG nova.compute.manager [-] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1053.058035] env[67015]: DEBUG nova.network.neutron [-] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1053.060719] env[67015]: DEBUG nova.compute.manager [None req-fbe08c8e-af44-4d57-8db2-2343af481a69 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: ef201b80-65b2-4fa1-8150-8b7a3fbea673] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1053.085724] env[67015]: DEBUG nova.network.neutron [-] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.087910] env[67015]: DEBUG oslo_concurrency.lockutils [None req-fbe08c8e-af44-4d57-8db2-2343af481a69 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Lock "ef201b80-65b2-4fa1-8150-8b7a3fbea673" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 243.942s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.096657] env[67015]: INFO nova.compute.manager [-] [instance: 781b688b-ec99-4423-99b2-2502c6e8a75d] Took 0.04 seconds to deallocate network for instance. [ 1053.106128] env[67015]: DEBUG nova.compute.manager [None req-9bc8a9d4-548d-4bb8-8faa-8f424aae3840 tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 2546fa27-1d27-4e23-94f0-e6bdb4b42179] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1053.132741] env[67015]: DEBUG nova.compute.manager [None req-9bc8a9d4-548d-4bb8-8faa-8f424aae3840 tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 2546fa27-1d27-4e23-94f0-e6bdb4b42179] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1053.159552] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9bc8a9d4-548d-4bb8-8faa-8f424aae3840 tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Lock "2546fa27-1d27-4e23-94f0-e6bdb4b42179" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.979s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.169064] env[67015]: DEBUG nova.compute.manager [None req-f1786543-32b2-42f0-8a9e-e70a34245ac1 tempest-ServerShowV257Test-1044046039 tempest-ServerShowV257Test-1044046039-project-member] [instance: 1dc453f0-0983-428f-a186-f61248fe74c3] Starting instance... 
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1053.200261] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6a036bcc-32cc-46ad-8a2e-3a42b3ee4e6e tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "781b688b-ec99-4423-99b2-2502c6e8a75d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.190s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.202498] env[67015]: DEBUG nova.compute.manager [None req-f1786543-32b2-42f0-8a9e-e70a34245ac1 tempest-ServerShowV257Test-1044046039 tempest-ServerShowV257Test-1044046039-project-member] [instance: 1dc453f0-0983-428f-a186-f61248fe74c3] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1053.225153] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f1786543-32b2-42f0-8a9e-e70a34245ac1 tempest-ServerShowV257Test-1044046039 tempest-ServerShowV257Test-1044046039-project-member] Lock "1dc453f0-0983-428f-a186-f61248fe74c3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.179s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.235159] env[67015]: DEBUG nova.compute.manager [None req-f4d8d90a-600e-4b03-af5b-4b0a128f3867 tempest-SecurityGroupsTestJSON-611676208 tempest-SecurityGroupsTestJSON-611676208-project-member] [instance: 0371af11-3654-4009-8e87-dc2c516133be] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1053.258843] env[67015]: DEBUG nova.compute.manager [None req-f4d8d90a-600e-4b03-af5b-4b0a128f3867 tempest-SecurityGroupsTestJSON-611676208 tempest-SecurityGroupsTestJSON-611676208-project-member] [instance: 0371af11-3654-4009-8e87-dc2c516133be] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1053.280756] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f4d8d90a-600e-4b03-af5b-4b0a128f3867 tempest-SecurityGroupsTestJSON-611676208 tempest-SecurityGroupsTestJSON-611676208-project-member] Lock "0371af11-3654-4009-8e87-dc2c516133be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 234.883s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.290436] env[67015]: DEBUG nova.compute.manager [None req-6c4aaec3-76e8-404d-a9ba-cf316de5d6be tempest-ServersTestMultiNic-1356030888 tempest-ServersTestMultiNic-1356030888-project-member] [instance: 13a015ac-f68b-421c-a397-c3f7d71531fb] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1053.313241] env[67015]: DEBUG nova.compute.manager [None req-6c4aaec3-76e8-404d-a9ba-cf316de5d6be tempest-ServersTestMultiNic-1356030888 tempest-ServersTestMultiNic-1356030888-project-member] [instance: 13a015ac-f68b-421c-a397-c3f7d71531fb] Instance disappeared before build. 
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1053.335453] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6c4aaec3-76e8-404d-a9ba-cf316de5d6be tempest-ServersTestMultiNic-1356030888 tempest-ServersTestMultiNic-1356030888-project-member] Lock "13a015ac-f68b-421c-a397-c3f7d71531fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 223.939s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.346183] env[67015]: DEBUG nova.compute.manager [None req-16dad952-1ea6-4706-bfd1-672487196b9d tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: de8e3930-5fdc-49ca-8bbb-46b34dc32e8f] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1053.369200] env[67015]: DEBUG nova.compute.manager [None req-16dad952-1ea6-4706-bfd1-672487196b9d tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: de8e3930-5fdc-49ca-8bbb-46b34dc32e8f] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1053.389983] env[67015]: DEBUG oslo_concurrency.lockutils [None req-16dad952-1ea6-4706-bfd1-672487196b9d tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Lock "de8e3930-5fdc-49ca-8bbb-46b34dc32e8f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 222.453s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.399137] env[67015]: DEBUG nova.compute.manager [None req-afec58e4-964d-415d-95d0-328fda8496b4 tempest-AttachVolumeNegativeTest-173917919 tempest-AttachVolumeNegativeTest-173917919-project-member] [instance: fe41695e-0aee-45a6-b66b-a7185e45fc4a] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1053.421420] env[67015]: DEBUG nova.compute.manager [None req-afec58e4-964d-415d-95d0-328fda8496b4 tempest-AttachVolumeNegativeTest-173917919 tempest-AttachVolumeNegativeTest-173917919-project-member] [instance: fe41695e-0aee-45a6-b66b-a7185e45fc4a] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1053.442430] env[67015]: DEBUG oslo_concurrency.lockutils [None req-afec58e4-964d-415d-95d0-328fda8496b4 tempest-AttachVolumeNegativeTest-173917919 tempest-AttachVolumeNegativeTest-173917919-project-member] Lock "fe41695e-0aee-45a6-b66b-a7185e45fc4a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 210.429s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.454067] env[67015]: DEBUG nova.compute.manager [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Starting instance... 
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1053.505721] env[67015]: DEBUG oslo_concurrency.lockutils [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1053.506022] env[67015]: DEBUG oslo_concurrency.lockutils [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.508157] env[67015]: INFO nova.compute.claims [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1053.930968] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef757b8-2154-4acd-a571-2f5b8be7f9d5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.939462] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35125a10-7f1e-4d03-89bb-6a842d90f4a8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.968710] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8650fb0e-9ef3-45ee-814e-6786d489ceb7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.975888] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3ed0829-4cf0-45e9-9bcf-133e677cf5a5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.988903] env[67015]: DEBUG nova.compute.provider_tree [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1054.000910] env[67015]: DEBUG nova.scheduler.client.report [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1054.015744] env[67015]: DEBUG oslo_concurrency.lockutils [None 
req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.510s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1054.016310] env[67015]: DEBUG nova.compute.manager [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1054.051405] env[67015]: DEBUG nova.compute.utils [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1054.052882] env[67015]: DEBUG nova.compute.manager [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1054.053128] env[67015]: DEBUG nova.network.neutron [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1054.062514] env[67015]: DEBUG nova.compute.manager [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1054.115835] env[67015]: DEBUG nova.policy [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee8a56b7100b45be9cc7d3c97341051f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '105905a257424bb5adffc9b70943494d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 1054.120717] env[67015]: DEBUG nova.compute.manager [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Start spawning the instance on the hypervisor. 
{{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1054.145831] env[67015]: DEBUG nova.virt.hardware [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1054.146116] env[67015]: DEBUG nova.virt.hardware [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1054.146285] env[67015]: DEBUG nova.virt.hardware [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1054.146468] env[67015]: DEBUG nova.virt.hardware [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1054.146614] env[67015]: DEBUG nova.virt.hardware [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1054.146760] env[67015]: DEBUG nova.virt.hardware [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1054.146984] env[67015]: DEBUG nova.virt.hardware [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1054.147200] env[67015]: DEBUG nova.virt.hardware [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1054.147376] env[67015]: DEBUG nova.virt.hardware [None 
req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1054.147542] env[67015]: DEBUG nova.virt.hardware [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1054.147713] env[67015]: DEBUG nova.virt.hardware [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1054.148557] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d16667ac-8a5d-4df5-a7c0-773dd788535c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.156193] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7fd84fb-cfb5-4a89-9314-4c22b088766a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.526703] env[67015]: DEBUG nova.network.neutron [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Successfully created port: 5268138b-7fd7-4842-a3d9-68802568c916 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1055.478999] env[67015]: DEBUG nova.compute.manager [req-e862d283-cb92-4a07-82c1-d9a69da37c80 req-3dd04fa8-21e7-4151-b6b4-616cf5f6b8b5 service nova] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Received event network-vif-plugged-5268138b-7fd7-4842-a3d9-68802568c916 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1055.479269] env[67015]: DEBUG oslo_concurrency.lockutils [req-e862d283-cb92-4a07-82c1-d9a69da37c80 req-3dd04fa8-21e7-4151-b6b4-616cf5f6b8b5 service nova] Acquiring lock "8c919afe-37b6-47f0-b939-d9df5800d7ee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1055.479510] env[67015]: DEBUG oslo_concurrency.lockutils [req-e862d283-cb92-4a07-82c1-d9a69da37c80 req-3dd04fa8-21e7-4151-b6b4-616cf5f6b8b5 service nova] Lock "8c919afe-37b6-47f0-b939-d9df5800d7ee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.479703] env[67015]: DEBUG oslo_concurrency.lockutils [req-e862d283-cb92-4a07-82c1-d9a69da37c80 req-3dd04fa8-21e7-4151-b6b4-616cf5f6b8b5 service nova] Lock "8c919afe-37b6-47f0-b939-d9df5800d7ee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.479908] env[67015]: DEBUG nova.compute.manager 
[req-e862d283-cb92-4a07-82c1-d9a69da37c80 req-3dd04fa8-21e7-4151-b6b4-616cf5f6b8b5 service nova] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] No waiting events found dispatching network-vif-plugged-5268138b-7fd7-4842-a3d9-68802568c916 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1055.480059] env[67015]: WARNING nova.compute.manager [req-e862d283-cb92-4a07-82c1-d9a69da37c80 req-3dd04fa8-21e7-4151-b6b4-616cf5f6b8b5 service nova] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Received unexpected event network-vif-plugged-5268138b-7fd7-4842-a3d9-68802568c916 for instance with vm_state building and task_state spawning. [ 1055.556740] env[67015]: DEBUG nova.network.neutron [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Successfully updated port: 5268138b-7fd7-4842-a3d9-68802568c916 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1055.570415] env[67015]: DEBUG oslo_concurrency.lockutils [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquiring lock "refresh_cache-8c919afe-37b6-47f0-b939-d9df5800d7ee" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1055.570682] env[67015]: DEBUG oslo_concurrency.lockutils [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquired lock "refresh_cache-8c919afe-37b6-47f0-b939-d9df5800d7ee" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.570923] env[67015]: DEBUG nova.network.neutron [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1055.628356] env[67015]: DEBUG nova.network.neutron [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Instance cache missing network info. 
{{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1056.128145] env[67015]: DEBUG nova.network.neutron [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Updating instance_info_cache with network_info: [{"id": "5268138b-7fd7-4842-a3d9-68802568c916", "address": "fa:16:3e:e8:3c:08", "network": {"id": "7dde7810-c31d-4e36-9427-9a4405525e82", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1963602411-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "105905a257424bb5adffc9b70943494d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5268138b-7f", "ovs_interfaceid": "5268138b-7fd7-4842-a3d9-68802568c916", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.145663] env[67015]: DEBUG oslo_concurrency.lockutils [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Releasing lock "refresh_cache-8c919afe-37b6-47f0-b939-d9df5800d7ee" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1056.146044] env[67015]: DEBUG nova.compute.manager [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Instance network_info: |[{"id": "5268138b-7fd7-4842-a3d9-68802568c916", "address": "fa:16:3e:e8:3c:08", "network": {"id": "7dde7810-c31d-4e36-9427-9a4405525e82", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1963602411-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "105905a257424bb5adffc9b70943494d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5268138b-7f", "ovs_interfaceid": "5268138b-7fd7-4842-a3d9-68802568c916", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1056.146863] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:3c:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5268138b-7fd7-4842-a3d9-68802568c916', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1056.154454] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Creating folder: Project (105905a257424bb5adffc9b70943494d). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1056.155084] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c5f28ba7-ba33-4f2d-ba2c-b33f9baf1cf8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.167562] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Created folder: Project (105905a257424bb5adffc9b70943494d) in parent group-v623108. [ 1056.167764] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Creating folder: Instances. Parent ref: group-v623174. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1056.168016] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5b20c8d3-9e70-4133-a301-18eaae8a8630 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.177875] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Created folder: Instances in parent group-v623174. [ 1056.178138] env[67015]: DEBUG oslo.service.loopingcall [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1056.178342] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1056.178596] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1b533827-b7fd-4781-90b1-f3113d0f14f7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.197717] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1056.197717] env[67015]: value = "task-3114465" [ 1056.197717] env[67015]: _type = "Task" [ 1056.197717] env[67015]: } to complete. 
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.205622] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114465, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.708657] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114465, 'name': CreateVM_Task, 'duration_secs': 0.348145} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.708943] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1056.709703] env[67015]: DEBUG oslo_concurrency.lockutils [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1056.709898] env[67015]: DEBUG oslo_concurrency.lockutils [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.710229] env[67015]: DEBUG oslo_concurrency.lockutils [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1056.710525] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4517fd71-3b4d-4dc0-8708-4c180352e704 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.715243] env[67015]: DEBUG oslo_vmware.api [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Waiting for the task: (returnval){ [ 1056.715243] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]5213935b-7741-0260-cf23-d88e71c28ae4" [ 1056.715243] env[67015]: _type = "Task" [ 1056.715243] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.725916] env[67015]: DEBUG oslo_vmware.api [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]5213935b-7741-0260-cf23-d88e71c28ae4, 'name': SearchDatastore_Task} progress is 0%. 
[ 1057.225484] env[67015]: DEBUG oslo_concurrency.lockutils [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1057.225758] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1057.226021] env[67015]: DEBUG oslo_concurrency.lockutils [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1057.574895] env[67015]: DEBUG nova.compute.manager [req-3792f87f-935a-4049-9e66-d15af5eda20f req-5f675381-454c-4260-a015-51bf4cc2190e service nova] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Received event network-changed-5268138b-7fd7-4842-a3d9-68802568c916 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1057.575166] env[67015]: DEBUG nova.compute.manager [req-3792f87f-935a-4049-9e66-d15af5eda20f req-5f675381-454c-4260-a015-51bf4cc2190e service nova] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Refreshing instance network info cache due to event network-changed-5268138b-7fd7-4842-a3d9-68802568c916. {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 1057.575358] env[67015]: DEBUG oslo_concurrency.lockutils [req-3792f87f-935a-4049-9e66-d15af5eda20f req-5f675381-454c-4260-a015-51bf4cc2190e service nova] Acquiring lock "refresh_cache-8c919afe-37b6-47f0-b939-d9df5800d7ee" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1057.575563] env[67015]: DEBUG oslo_concurrency.lockutils [req-3792f87f-935a-4049-9e66-d15af5eda20f req-5f675381-454c-4260-a015-51bf4cc2190e service nova] Acquired lock "refresh_cache-8c919afe-37b6-47f0-b939-d9df5800d7ee" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1057.575742] env[67015]: DEBUG nova.network.neutron [req-3792f87f-935a-4049-9e66-d15af5eda20f req-5f675381-454c-4260-a015-51bf4cc2190e service nova] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Refreshing network info cache for port 5268138b-7fd7-4842-a3d9-68802568c916 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1057.999161] env[67015]: DEBUG nova.network.neutron [req-3792f87f-935a-4049-9e66-d15af5eda20f req-5f675381-454c-4260-a015-51bf4cc2190e service nova] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Updated VIF entry in instance network info cache for port 5268138b-7fd7-4842-a3d9-68802568c916. {{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
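The Acquiring/Acquired/Releasing lines around "[datastore2] devstack-image-cache_base/..." are oslo.concurrency's lockutils serializing concurrent spawns of the same cached image; the lock name encodes the datastore plus the image ID, and the "external semaphore" line corresponds to the inter-process (file-based) half of the lock. The shape of that critical section, as a sketch (lock-name format copied from the log; the body is illustrative):

from oslo_concurrency import lockutils


def with_image_cache_lock(datastore, image_id):
    # Same naming scheme as the log lines above.
    lock_name = f'[{datastore}] devstack-image-cache_base/{image_id}'
    # external=True adds the file-based semaphore so other compute
    # processes on the same host are excluded too.
    with lockutils.lock(lock_name, external=True):
        # ...check the image cache directory, download only if missing...
        pass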
[ 1057.999599] env[67015]: DEBUG nova.network.neutron [req-3792f87f-935a-4049-9e66-d15af5eda20f req-5f675381-454c-4260-a015-51bf4cc2190e service nova] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Updating instance_info_cache with network_info: [{"id": "5268138b-7fd7-4842-a3d9-68802568c916", "address": "fa:16:3e:e8:3c:08", "network": {"id": "7dde7810-c31d-4e36-9427-9a4405525e82", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1963602411-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "105905a257424bb5adffc9b70943494d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5268138b-7f", "ovs_interfaceid": "5268138b-7fd7-4842-a3d9-68802568c916", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1058.011671] env[67015]: DEBUG oslo_concurrency.lockutils [req-3792f87f-935a-4049-9e66-d15af5eda20f req-5f675381-454c-4260-a015-51bf4cc2190e service nova] Releasing lock "refresh_cache-8c919afe-37b6-47f0-b939-d9df5800d7ee" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1062.652616] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1062.652897] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1063.513522] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1065.514686] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1067.514326] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1067.514652] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
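The instance_info_cache entry logged above is the per-port network model nova stores as JSON. Its nesting is easiest to see by pulling out the useful fields; the snippet below parses a trimmed copy of that record (values taken from the log, fields not used here omitted):

import json

port = json.loads('''{
  "id": "5268138b-7fd7-4842-a3d9-68802568c916",
  "address": "fa:16:3e:e8:3c:08",
  "network": {"id": "7dde7810-c31d-4e36-9427-9a4405525e82",
              "bridge": "br-int",
              "subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.12",
                                    "type": "fixed", "version": 4}]}]},
  "type": "ovs",
  "details": {"segmentation_id": 401,
              "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee"},
  "devname": "tap5268138b-7f"
}''')

fixed_ips = [ip['address']
             for subnet in port['network']['subnets']
             for ip in subnet['ips']]
print(port['devname'], fixed_ips, port['details']['segmentation_id'])
# tap5268138b-7f ['192.168.128.12'] 401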
[ 1067.514825] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1067.514971] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}}
[ 1067.515139] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1067.526322] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1067.526576] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1067.526836] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1067.527015] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1067.528130] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94501438-4357-4f35-85b2-1333f6bedeb7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1067.537436] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e6c29d0-0dc5-4b88-86ef-1435a6408d1f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1067.551518] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a7ff2c-e63f-428b-a466-e4d30ed899a3 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1067.557745] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39726726-769d-4e5c-83f6-2de39e0cbf0d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1067.586071] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181035MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
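All of the "Running periodic task ComputeManager.*" lines, including the update_available_resource run that kicks off the resource audit above, come from oslo.service's periodic task machinery: decorated manager methods are dispatched by the service loop whenever their spacing elapses. A self-contained sketch of the mechanism (the manager and task here are illustrative, not nova's real ones):

from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF


class Manager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=10)
    def _poll_volume_usage(self, context):
        # Runs roughly every 10s; each dispatch produces a
        # "Running periodic task ..." DEBUG line like those above.
        pass


mgr = Manager()
# The service loop calls this repeatedly; it runs whatever is due.
mgr.run_periodic_tasks(context=None)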
[ 1067.586216] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1067.586413] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1067.663300] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 0734f630-dea5-4ee0-b890-dd50f3e8b178 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1067.663470] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1067.663596] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 66fa7689-aea7-4b88-b63c-0754f5e99d51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1067.663718] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 3cbfca3b-863a-40d1-81ab-63794b8de97e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1067.663838] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 5c77964f-e902-489a-86c3-9c9d4dd304d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1067.664017] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 96feb18e-14ee-40cf-bd5d-89a4e773c797 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
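Each "actively managed ... allocations in placement" line pairs an instance with the placement allocation derived from its flavor; the resource classes map one-to-one onto flavor fields. A sketch of that mapping (the function name is illustrative):

def flavor_to_allocation(vcpus: int, memory_mb: int, root_gb: int) -> dict:
    # Matches the shape logged above, e.g.
    # {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}
    return {'resources': {'DISK_GB': root_gb,
                          'MEMORY_MB': memory_mb,
                          'VCPU': vcpus}}


assert flavor_to_allocation(1, 128, 1) == {
    'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}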
[ 1067.664129] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 30f91210-0318-4912-808b-843c2cd04ea1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1067.664252] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 843278e1-7d76-4f50-8170-9e335d29326e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1067.664367] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance db3de804-63b7-4887-b752-282e70e0f20e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1067.664480] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8c919afe-37b6-47f0-b939-d9df5800d7ee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1067.674876] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 92f1e0cd-924f-42a4-a91b-6e9dcede20fc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1067.684498] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 437b21d5-932d-4216-b7f7-17c6eab2665f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1067.693984] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1067.703165] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 2d427736-0a1d-4963-9380-6c8d47b39e1c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1067.712211] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 74d737f1-8d72-411f-a0f7-a4483ae6804f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1067.721907] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 45e9996a-dee3-4cf3-85ed-c972f27dd7b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1067.731347] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 3b4a01e3-9653-45ba-9bc5-f37443fe0f40 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1067.741497] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance b3812d61-be7e-4c30-b59a-1eb59d987954 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1067.751076] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 7658ef83-ea52-41b2-b636-7f4fc7d9deea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1067.760269] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance bdbaaad2-4343-4864-ba52-108b2bff51f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1067.769612] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance cf721d5b-0a1d-4fa0-a985-eae683e7309f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1067.779174] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 2100b556-11e5-4846-ab1d-5eff53343ef4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1067.788754] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 7b744243-c7e5-4253-9273-9d7f84772d96 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1067.798418] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 98f18180-bd1c-492d-9fbe-4bf306aca4b2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1067.808279] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 3c14fe11-5172-4611-acf8-c29746a5658e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1067.817629] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6d6a0e44-f9b4-4da4-948b-b05b86c93a3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1067.827400] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 13d49c4b-bc0e-4e9e-aecb-59fd2745e9c5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1067.836642] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 3fda5b6d-44b1-412a-9eff-0e8be3c725c1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1067.845683] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 2698cd39-5b23-48b8-ae60-48f7576c1546 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1067.854878] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 2b1b82aa-9f54-4829-98bf-011f6289a534 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1067.864190] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 46a76bbb-28ff-4b71-aa4e-f946ef586b64 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1067.873463] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance a51672b6-f918-4ba3-9c55-af2edb3ec693 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1067.873703] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1067.873853] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1068.235123] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d85dfc-f6eb-4d07-9160-350aa9491d8a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1068.243021] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c44d5a3-2883-4f0a-8719-b046fac88327 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1068.273688] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5fbeb79-a4e5-435d-93af-891059e4161a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1068.280659] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d68d89c-2c8b-4cfa-9526-dcd945178d04 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1068.293646] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1068.302449] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1068.316662] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1068.316839] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.730s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1069.317412] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
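The final resource view can be cross-checked from the lines above: ten actively managed instances, each holding {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}, plus the 512 MB reserved in the MEMORY_MB inventory (the "scheduled ... yet to start" allocations are placement-only and not counted in the tracker's usage). A quick arithmetic check with the logged numbers:

reserved_mb = 512          # MEMORY_MB 'reserved' in the inventory data
instances = 10             # "total allocated vcpus: 10"
per_instance = {'MEMORY_MB': 128, 'DISK_GB': 1, 'VCPU': 1}

used_ram = reserved_mb + instances * per_instance['MEMORY_MB']
used_disk = instances * per_instance['DISK_GB']
used_vcpus = instances * per_instance['VCPU']
assert (used_ram, used_disk, used_vcpus) == (1792, 10, 10)  # matches the log

# Placement headroom: 48 physical vCPUs * allocation_ratio 4.0
schedulable_vcpus = 48 * 4.0
print(schedulable_vcpus)  # 192.0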
[ 1069.317735] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}}
[ 1069.317735] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1069.337481] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1069.337647] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1069.337783] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1069.337914] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1069.338052] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1069.338182] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1069.338304] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1069.338424] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1069.339023] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1069.339023] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1069.339023] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}}
[ 1077.941294] env[67015]: DEBUG oslo_concurrency.lockutils [None req-327b186f-13cc-4295-af61-2bbca40e6933 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquiring lock "8c919afe-37b6-47f0-b939-d9df5800d7ee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1079.690199] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Acquiring lock "7a5c2a57-b28d-45e0-ab7b-5a649758b69b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1079.690510] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Lock "7a5c2a57-b28d-45e0-ab7b-5a649758b69b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1100.976545] env[67015]: WARNING oslo_vmware.rw_handles [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1100.976545] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1100.976545] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1100.976545] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 1100.976545] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1100.976545] env[67015]: ERROR oslo_vmware.rw_handles response.begin()
[ 1100.976545] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1100.976545] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 1100.976545] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1100.976545] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 1100.976545] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1100.976545] env[67015]: ERROR oslo_vmware.rw_handles
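The WARNING above comes from the upload handle's close(): after streaming image data, getresponse() found that the ESX side had dropped the connection, so oslo.vmware logged the traceback and carried on. The failure mode is plain http.client behaviour; a hedged sketch of handling it (illustrative handler, not nova's code):

import http.client


def finish_upload(conn: http.client.HTTPSConnection):
    """Read the server's response after streaming a request body.

    RemoteDisconnected (a ConnectionResetError subclass) is raised by
    getresponse() when the peer closes without answering, which is
    exactly the traceback logged above.
    """
    try:
        return conn.getresponse()
    except http.client.RemoteDisconnected:
        # The write may still have landed; callers decide whether to
        # verify the uploaded file or retry. oslo.vmware just warns.
        return None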
[ 1100.977120] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/3cd5f848-8a61-41e3-9586-b2a43700eaca/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1100.979033] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1100.979371] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Copying Virtual Disk [datastore2] vmware_temp/3cd5f848-8a61-41e3-9586-b2a43700eaca/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/3cd5f848-8a61-41e3-9586-b2a43700eaca/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1100.979783] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9f80133e-55ca-4a6c-b3aa-0a005d27a3ec {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1100.989607] env[67015]: DEBUG oslo_vmware.api [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Waiting for the task: (returnval){
[ 1100.989607] env[67015]: value = "task-3114466"
[ 1100.989607] env[67015]: _type = "Task"
[ 1100.989607] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1100.997492] env[67015]: DEBUG oslo_vmware.api [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Task: {'id': task-3114466, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1101.501187] env[67015]: DEBUG oslo_vmware.exceptions [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Fault InvalidArgument not matched. {{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1101.501477] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1101.502028] env[67015]: ERROR nova.compute.manager [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1101.502028] env[67015]: Faults: ['InvalidArgument']
[ 1101.502028] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Traceback (most recent call last):
[ 1101.502028] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1101.502028] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] yield resources
[ 1101.502028] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1101.502028] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] self.driver.spawn(context, instance, image_meta,
[ 1101.502028] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1101.502028] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1101.502028] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1101.502028] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] self._fetch_image_if_missing(context, vi)
[ 1101.502028] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1101.502417] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] image_cache(vi, tmp_image_ds_loc)
[ 1101.502417] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1101.502417] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] vm_util.copy_virtual_disk(
[ 1101.502417] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1101.502417] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] session._wait_for_task(vmdk_copy_task)
[ 1101.502417] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1101.502417] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] return self.wait_for_task(task_ref)
[ 1101.502417] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1101.502417] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] return evt.wait()
[ 1101.502417] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1101.502417] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] result = hub.switch()
[ 1101.502417] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1101.502417] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] return self.greenlet.switch()
[ 1101.502802] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1101.502802] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] self.f(*self.args, **self.kw)
[ 1101.502802] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1101.502802] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] raise exceptions.translate_fault(task_info.error)
[ 1101.502802] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1101.502802] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Faults: ['InvalidArgument']
[ 1101.502802] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178]
[ 1101.502802] env[67015]: INFO nova.compute.manager [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Terminating instance
[ 1101.503944] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1101.504880] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1101.504880] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9ed319f2-c614-4466-8a6f-dbe6acc00262 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1101.506657] env[67015]: DEBUG nova.compute.manager [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1101.506895] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1101.507765] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-431f9c2f-01fd-418a-8b21-77af5e3927aa {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1101.514884] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1101.515135] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bbc289ef-8573-4ff0-94f2-eb27d4d18abf {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1101.517540] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1101.517757] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1101.518782] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4734398c-12ef-49bb-86bf-b26fe3f382bb {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1101.523587] env[67015]: DEBUG oslo_vmware.api [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Waiting for the task: (returnval){
[ 1101.523587] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]528dc2b4-a0eb-e0ed-faf1-95727c1958b6"
[ 1101.523587] env[67015]: _type = "Task"
[ 1101.523587] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1101.531299] env[67015]: DEBUG oslo_vmware.api [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]528dc2b4-a0eb-e0ed-faf1-95727c1958b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1101.585555] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1101.585776] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1101.585994] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Deleting the datastore file [datastore2] 0734f630-dea5-4ee0-b890-dd50f3e8b178 {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1101.586358] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f7c3f2dc-88d7-4128-aa50-5ef26ae0f442 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1101.592497] env[67015]: DEBUG oslo_vmware.api [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Waiting for the task: (returnval){
[ 1101.592497] env[67015]: value = "task-3114468"
[ 1101.592497] env[67015]: _type = "Task"
[ 1101.592497] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1101.600068] env[67015]: DEBUG oslo_vmware.api [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Task: {'id': task-3114468, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
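Instance teardown on this driver is the two-step sequence interleaved above: a synchronous VirtualMachine.UnregisterVM (no task is returned), then a FileManager.DeleteDatastoreFile_Task on the instance directory, awaited like any other task. Sketched under the assumption that the morefs are already resolved (mirrors the ds_util/vmops calls named in the log):

def destroy_vm(session, vm_ref, datacenter_ref, ds_path):
    # 1. Remove the VM from vCenter inventory; its files stay on disk.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
    # 2. Delete the instance directory, e.g.
    #    "[datastore2] 0734f630-dea5-4ee0-b890-dd50f3e8b178" as logged.
    file_manager = session.vim.service_content.fileManager
    task_ref = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path,
                                  datacenter=datacenter_ref)
    session.wait_for_task(task_ref)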
[ 1102.033756] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1102.034101] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Creating directory with path [datastore2] vmware_temp/8f753489-1e84-4808-9391-c5e4c9ad7ac7/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1102.034101] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1836c31-15c6-4b13-8b71-b33080fcf2ee {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1102.045643] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Created directory with path [datastore2] vmware_temp/8f753489-1e84-4808-9391-c5e4c9ad7ac7/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1102.045955] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Fetch image to [datastore2] vmware_temp/8f753489-1e84-4808-9391-c5e4c9ad7ac7/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1102.046220] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/8f753489-1e84-4808-9391-c5e4c9ad7ac7/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1102.047028] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa80641-9952-45ad-ad59-c43f164fcfa2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1102.053819] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd72c899-291f-437a-88c3-3b43de4572ea {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1102.064476] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5373c69-d512-4a04-b2e2-64e24dca2466 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1102.098242] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8365b51-2187-476a-8569-db774246c210 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1102.104974] env[67015]: DEBUG oslo_vmware.api [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Task: {'id': task-3114468, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080935} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1102.106372] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1102.106566] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1102.106770] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1102.106938] env[67015]: INFO nova.compute.manager [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Took 0.60 seconds to destroy the instance on the hypervisor.
[ 1102.108758] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6b0b0ee4-4480-4fdb-bbff-78d108f77754 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1102.111361] env[67015]: DEBUG nova.compute.claims [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1102.111546] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1102.111770] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1102.128810] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1102.189227] env[67015]: DEBUG oslo_vmware.rw_handles [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8f753489-1e84-4808-9391-c5e4c9ad7ac7/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1102.250412] env[67015]: DEBUG oslo_vmware.rw_handles [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1102.250633] env[67015]: DEBUG oslo_vmware.rw_handles [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8f753489-1e84-4808-9391-c5e4c9ad7ac7/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
{{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1102.570109] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee9a341a-e28d-453c-b2b5-1dace35f2df4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.577786] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d4036b-e616-4000-afcc-be99c94473b4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.606416] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5829c943-9c11-4d53-a129-c6eacd39c8a6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.612962] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca55ecb-6636-411e-8b63-439866f6331c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.625412] env[67015]: DEBUG nova.compute.provider_tree [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1102.633848] env[67015]: DEBUG nova.scheduler.client.report [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1102.649341] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.537s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1102.649865] env[67015]: ERROR nova.compute.manager [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1102.649865] env[67015]: Faults: ['InvalidArgument'] [ 1102.649865] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Traceback (most recent call last): [ 1102.649865] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1102.649865] 
env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] self.driver.spawn(context, instance, image_meta, [ 1102.649865] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1102.649865] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1102.649865] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1102.649865] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] self._fetch_image_if_missing(context, vi) [ 1102.649865] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1102.649865] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] image_cache(vi, tmp_image_ds_loc) [ 1102.649865] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1102.650261] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] vm_util.copy_virtual_disk( [ 1102.650261] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1102.650261] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] session._wait_for_task(vmdk_copy_task) [ 1102.650261] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1102.650261] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] return self.wait_for_task(task_ref) [ 1102.650261] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1102.650261] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] return evt.wait() [ 1102.650261] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1102.650261] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] result = hub.switch() [ 1102.650261] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1102.650261] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] return self.greenlet.switch() [ 1102.650261] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1102.650261] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] self.f(*self.args, **self.kw) [ 1102.650626] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1102.650626] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] raise exceptions.translate_fault(task_info.error) [ 1102.650626] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1102.650626] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Faults: ['InvalidArgument'] [ 1102.650626] env[67015]: ERROR nova.compute.manager [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] [ 1102.650626] env[67015]: DEBUG nova.compute.utils [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1102.652013] env[67015]: DEBUG nova.compute.manager [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Build of instance 0734f630-dea5-4ee0-b890-dd50f3e8b178 was re-scheduled: A specified parameter was not correct: fileType [ 1102.652013] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1102.652388] env[67015]: DEBUG nova.compute.manager [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1102.652563] env[67015]: DEBUG nova.compute.manager [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1102.652715] env[67015]: DEBUG nova.compute.manager [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1102.652876] env[67015]: DEBUG nova.network.neutron [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1102.972702] env[67015]: DEBUG nova.network.neutron [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.984354] env[67015]: INFO nova.compute.manager [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Took 0.33 seconds to deallocate network for instance. [ 1103.086797] env[67015]: INFO nova.scheduler.client.report [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Deleted allocations for instance 0734f630-dea5-4ee0-b890-dd50f3e8b178 [ 1103.110528] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7ca95b7-c5c1-4964-890f-2ff5523e21b3 tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Lock "0734f630-dea5-4ee0-b890-dd50f3e8b178" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 534.242s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.111388] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6daea0da-c076-4553-8038-572c7b9641ac tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Lock "0734f630-dea5-4ee0-b890-dd50f3e8b178" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 335.286s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1103.111559] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6daea0da-c076-4553-8038-572c7b9641ac tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Acquiring lock "0734f630-dea5-4ee0-b890-dd50f3e8b178-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1103.111767] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6daea0da-c076-4553-8038-572c7b9641ac tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Lock "0734f630-dea5-4ee0-b890-dd50f3e8b178-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1103.111934] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6daea0da-c076-4553-8038-572c7b9641ac tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Lock "0734f630-dea5-4ee0-b890-dd50f3e8b178-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.113923] env[67015]: INFO nova.compute.manager [None req-6daea0da-c076-4553-8038-572c7b9641ac tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Terminating instance [ 1103.115754] env[67015]: DEBUG nova.compute.manager [None req-6daea0da-c076-4553-8038-572c7b9641ac tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1103.115948] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-6daea0da-c076-4553-8038-572c7b9641ac tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1103.117056] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ebe0f973-3ed2-4266-a153-8fc0e9cca557 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.127703] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b01c7520-5603-4c23-bfbf-d54f0f8974fb {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.138771] env[67015]: DEBUG nova.compute.manager [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 92f1e0cd-924f-42a4-a91b-6e9dcede20fc] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1103.159558] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-6daea0da-c076-4553-8038-572c7b9641ac tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0734f630-dea5-4ee0-b890-dd50f3e8b178 could not be found. 
[ 1103.161022] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-6daea0da-c076-4553-8038-572c7b9641ac tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1103.161022] env[67015]: INFO nova.compute.manager [None req-6daea0da-c076-4553-8038-572c7b9641ac tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1103.161022] env[67015]: DEBUG oslo.service.loopingcall [None req-6daea0da-c076-4553-8038-572c7b9641ac tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1103.161022] env[67015]: DEBUG nova.compute.manager [-] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1103.161022] env[67015]: DEBUG nova.network.neutron [-] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1103.164754] env[67015]: DEBUG nova.compute.manager [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 92f1e0cd-924f-42a4-a91b-6e9dcede20fc] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1103.192790] env[67015]: DEBUG oslo_concurrency.lockutils [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Lock "92f1e0cd-924f-42a4-a91b-6e9dcede20fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 222.394s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.193063] env[67015]: DEBUG nova.network.neutron [-] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1103.201247] env[67015]: INFO nova.compute.manager [-] [instance: 0734f630-dea5-4ee0-b890-dd50f3e8b178] Took 0.04 seconds to deallocate network for instance. [ 1103.203313] env[67015]: DEBUG nova.compute.manager [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Starting instance... 
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1103.254941] env[67015]: DEBUG oslo_concurrency.lockutils [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1103.255220] env[67015]: DEBUG oslo_concurrency.lockutils [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1103.256704] env[67015]: INFO nova.compute.claims [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1103.289502] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6daea0da-c076-4553-8038-572c7b9641ac tempest-ServerExternalEventsTest-937991817 tempest-ServerExternalEventsTest-937991817-project-member] Lock "0734f630-dea5-4ee0-b890-dd50f3e8b178" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.178s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.662666] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34cad50a-6060-4796-8afe-ba318a6d2e06 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.670702] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5cda60-2730-44ca-bae3-b0bbf9e48836 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.700019] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c7c97e-dd85-4a36-8e61-d190305bd8c4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.706863] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6c9749a-7214-4310-bddf-f56684d9c2a7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.719536] env[67015]: DEBUG nova.compute.provider_tree [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1103.728133] env[67015]: DEBUG nova.scheduler.client.report [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 
48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1103.741850] env[67015]: DEBUG oslo_concurrency.lockutils [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.487s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.742328] env[67015]: DEBUG nova.compute.manager [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1103.785883] env[67015]: DEBUG nova.compute.utils [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1103.787274] env[67015]: DEBUG nova.compute.manager [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1103.787405] env[67015]: DEBUG nova.network.neutron [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1103.796614] env[67015]: DEBUG nova.compute.manager [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1103.856994] env[67015]: DEBUG nova.compute.manager [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Start spawning the instance on the hypervisor. 
{{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1103.867015] env[67015]: DEBUG nova.policy [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '48441807d0a54004a9ad41afca6ef53a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '58cb53c4e60c4084a1211154a2c2b12a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 1103.882812] env[67015]: DEBUG nova.virt.hardware [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1103.883124] env[67015]: DEBUG nova.virt.hardware [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1103.883302] env[67015]: DEBUG nova.virt.hardware [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1103.883501] env[67015]: DEBUG nova.virt.hardware [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1103.883649] env[67015]: DEBUG nova.virt.hardware [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1103.883800] env[67015]: DEBUG nova.virt.hardware [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 
1103.884020] env[67015]: DEBUG nova.virt.hardware [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1103.884209] env[67015]: DEBUG nova.virt.hardware [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1103.884400] env[67015]: DEBUG nova.virt.hardware [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1103.884569] env[67015]: DEBUG nova.virt.hardware [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1103.884747] env[67015]: DEBUG nova.virt.hardware [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1103.885619] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-145ee85c-0a47-4d60-abdb-52847987307c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.893482] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a0b542b-9021-48f9-9f6f-da5ccfd800bb {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.254245] env[67015]: DEBUG nova.network.neutron [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Successfully created port: e6c9cfe9-567f-49bb-ac9b-9cfa97699ac8 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1105.302728] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff6acf81-42ec-4ead-84ba-7d9bff826b29 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquiring lock "437b21d5-932d-4216-b7f7-17c6eab2665f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.481334] env[67015]: DEBUG nova.network.neutron [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Successfully updated port: e6c9cfe9-567f-49bb-ac9b-9cfa97699ac8 {{(pid=67015) _update_port 
/opt/stack/nova/nova/network/neutron.py:586}} [ 1105.487326] env[67015]: DEBUG nova.compute.manager [req-a6d2d768-89e6-4187-af81-3258d291aaae req-2d4dc056-a88b-4fd3-a67f-eb2fdb2b9b45 service nova] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Received event network-vif-plugged-e6c9cfe9-567f-49bb-ac9b-9cfa97699ac8 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1105.487326] env[67015]: DEBUG oslo_concurrency.lockutils [req-a6d2d768-89e6-4187-af81-3258d291aaae req-2d4dc056-a88b-4fd3-a67f-eb2fdb2b9b45 service nova] Acquiring lock "437b21d5-932d-4216-b7f7-17c6eab2665f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.487326] env[67015]: DEBUG oslo_concurrency.lockutils [req-a6d2d768-89e6-4187-af81-3258d291aaae req-2d4dc056-a88b-4fd3-a67f-eb2fdb2b9b45 service nova] Lock "437b21d5-932d-4216-b7f7-17c6eab2665f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.487326] env[67015]: DEBUG oslo_concurrency.lockutils [req-a6d2d768-89e6-4187-af81-3258d291aaae req-2d4dc056-a88b-4fd3-a67f-eb2fdb2b9b45 service nova] Lock "437b21d5-932d-4216-b7f7-17c6eab2665f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.487873] env[67015]: DEBUG nova.compute.manager [req-a6d2d768-89e6-4187-af81-3258d291aaae req-2d4dc056-a88b-4fd3-a67f-eb2fdb2b9b45 service nova] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] No waiting events found dispatching network-vif-plugged-e6c9cfe9-567f-49bb-ac9b-9cfa97699ac8 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1105.487873] env[67015]: WARNING nova.compute.manager [req-a6d2d768-89e6-4187-af81-3258d291aaae req-2d4dc056-a88b-4fd3-a67f-eb2fdb2b9b45 service nova] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Received unexpected event network-vif-plugged-e6c9cfe9-567f-49bb-ac9b-9cfa97699ac8 for instance with vm_state building and task_state deleting. 
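The WARNING above ("Received unexpected event network-vif-plugged-... for instance with vm_state building and task_state deleting") fires when an external event from Neutron arrives before any waiter has registered for it. A hypothetical, simplified sketch of the pop/dispatch pattern (not Nova's actual InstanceEvents class):

import threading

class InstanceEvents:
    def __init__(self):
        self._waiters = {}            # event name -> threading.Event
        self._lock = threading.Lock()

    def prepare_for_event(self, name):
        # A spawning thread registers interest before plugging the VIF.
        with self._lock:
            return self._waiters.setdefault(name, threading.Event())

    def pop_instance_event(self, name):
        # The external-event handler pops and signals the waiter; when no
        # waiter exists, this corresponds to the 'No waiting events found
        # dispatching network-vif-plugged-...' line above.
        with self._lock:
            waiter = self._waiters.pop(name, None)
        if waiter is None:
            return False
        waiter.set()
        return True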
[ 1105.491636] env[67015]: DEBUG oslo_concurrency.lockutils [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquiring lock "refresh_cache-437b21d5-932d-4216-b7f7-17c6eab2665f" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1105.491636] env[67015]: DEBUG oslo_concurrency.lockutils [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquired lock "refresh_cache-437b21d5-932d-4216-b7f7-17c6eab2665f" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.491636] env[67015]: DEBUG nova.network.neutron [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1105.575295] env[67015]: DEBUG nova.network.neutron [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1105.863276] env[67015]: DEBUG nova.network.neutron [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Updating instance_info_cache with network_info: [{"id": "e6c9cfe9-567f-49bb-ac9b-9cfa97699ac8", "address": "fa:16:3e:ad:78:aa", "network": {"id": "7451a549-058d-44bf-acca-7bd945ac5def", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.78", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "482f98aba88c4103b6a8d7c7ab5d030d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6c9cfe9-56", "ovs_interfaceid": "e6c9cfe9-567f-49bb-ac9b-9cfa97699ac8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.876602] env[67015]: DEBUG oslo_concurrency.lockutils [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Releasing lock "refresh_cache-437b21d5-932d-4216-b7f7-17c6eab2665f" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1105.876977] env[67015]: DEBUG nova.compute.manager [None 
req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Instance network_info: |[{"id": "e6c9cfe9-567f-49bb-ac9b-9cfa97699ac8", "address": "fa:16:3e:ad:78:aa", "network": {"id": "7451a549-058d-44bf-acca-7bd945ac5def", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.78", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "482f98aba88c4103b6a8d7c7ab5d030d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6c9cfe9-56", "ovs_interfaceid": "e6c9cfe9-567f-49bb-ac9b-9cfa97699ac8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1105.877414] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:78:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f0ef5aba-bd9a-42ff-a1a0-5e763986d70a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6c9cfe9-567f-49bb-ac9b-9cfa97699ac8', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1105.884728] env[67015]: DEBUG oslo.service.loopingcall [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1105.885227] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1105.885451] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6f79c386-2a89-4c32-933b-916d383261db {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.906505] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1105.906505] env[67015]: value = "task-3114469" [ 1105.906505] env[67015]: _type = "Task" [ 1105.906505] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.914288] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114469, 'name': CreateVM_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.416831] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114469, 'name': CreateVM_Task, 'duration_secs': 0.282706} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.417144] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1106.417728] env[67015]: DEBUG oslo_concurrency.lockutils [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1106.417891] env[67015]: DEBUG oslo_concurrency.lockutils [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.418233] env[67015]: DEBUG oslo_concurrency.lockutils [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1106.418479] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b201be44-9cc0-4c81-ab11-bd5aa2cb661a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.422708] env[67015]: DEBUG oslo_vmware.api [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Waiting for the task: (returnval){ [ 1106.422708] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52ee07af-1a86-1bb1-ffc1-a35f3a4bd65f" [ 1106.422708] env[67015]: _type = "Task" [ 1106.422708] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.430013] env[67015]: DEBUG oslo_vmware.api [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52ee07af-1a86-1bb1-ffc1-a35f3a4bd65f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.935031] env[67015]: DEBUG oslo_concurrency.lockutils [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1106.935323] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1106.935539] env[67015]: DEBUG oslo_concurrency.lockutils [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1107.501640] env[67015]: DEBUG nova.compute.manager [req-0e44ef92-1d83-4cdd-b445-7ecc7ee881a9 req-4f86a7b2-e15a-4ff7-a966-0430c1fe0d54 service nova] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Received event network-changed-e6c9cfe9-567f-49bb-ac9b-9cfa97699ac8 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1107.501872] env[67015]: DEBUG nova.compute.manager [req-0e44ef92-1d83-4cdd-b445-7ecc7ee881a9 req-4f86a7b2-e15a-4ff7-a966-0430c1fe0d54 service nova] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Refreshing instance network info cache due to event network-changed-e6c9cfe9-567f-49bb-ac9b-9cfa97699ac8. {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1107.502069] env[67015]: DEBUG oslo_concurrency.lockutils [req-0e44ef92-1d83-4cdd-b445-7ecc7ee881a9 req-4f86a7b2-e15a-4ff7-a966-0430c1fe0d54 service nova] Acquiring lock "refresh_cache-437b21d5-932d-4216-b7f7-17c6eab2665f" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1107.502224] env[67015]: DEBUG oslo_concurrency.lockutils [req-0e44ef92-1d83-4cdd-b445-7ecc7ee881a9 req-4f86a7b2-e15a-4ff7-a966-0430c1fe0d54 service nova] Acquired lock "refresh_cache-437b21d5-932d-4216-b7f7-17c6eab2665f" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.502383] env[67015]: DEBUG nova.network.neutron [req-0e44ef92-1d83-4cdd-b445-7ecc7ee881a9 req-4f86a7b2-e15a-4ff7-a966-0430c1fe0d54 service nova] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Refreshing network info cache for port e6c9cfe9-567f-49bb-ac9b-9cfa97699ac8 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1108.140530] env[67015]: DEBUG nova.network.neutron [req-0e44ef92-1d83-4cdd-b445-7ecc7ee881a9 req-4f86a7b2-e15a-4ff7-a966-0430c1fe0d54 service nova] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Updated VIF entry in instance network info cache for port e6c9cfe9-567f-49bb-ac9b-9cfa97699ac8. 
{{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1108.140903] env[67015]: DEBUG nova.network.neutron [req-0e44ef92-1d83-4cdd-b445-7ecc7ee881a9 req-4f86a7b2-e15a-4ff7-a966-0430c1fe0d54 service nova] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Updating instance_info_cache with network_info: [{"id": "e6c9cfe9-567f-49bb-ac9b-9cfa97699ac8", "address": "fa:16:3e:ad:78:aa", "network": {"id": "7451a549-058d-44bf-acca-7bd945ac5def", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.78", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "482f98aba88c4103b6a8d7c7ab5d030d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6c9cfe9-56", "ovs_interfaceid": "e6c9cfe9-567f-49bb-ac9b-9cfa97699ac8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1108.151449] env[67015]: DEBUG oslo_concurrency.lockutils [req-0e44ef92-1d83-4cdd-b445-7ecc7ee881a9 req-4f86a7b2-e15a-4ff7-a966-0430c1fe0d54 service nova] Releasing lock "refresh_cache-437b21d5-932d-4216-b7f7-17c6eab2665f" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1122.514624] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1122.514921] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1123.515064] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1123.515064] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1123.515449] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Cleaning up deleted instances {{(pid=67015) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}}
[ 1123.528190] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] There are 0 instances to clean {{(pid=67015) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}}
[ 1124.514561] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1127.521525] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1127.521869] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1127.521917] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1127.522082] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Cleaning up deleted instances with incomplete migration {{(pid=67015) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}}
[ 1128.525680] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1128.525680] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1128.525680] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}}
[ 1128.525680] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1129.175308] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1129.175512] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1129.175679] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1129.175839] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1129.176998] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb29877-af50-4b1a-98ac-49a6be1a93e5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1129.185278] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbfe8286-8ed0-445b-ae60-88a2dfa17496 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1129.198235] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01bd0994-7ea7-4e80-813f-46c37cf9140d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1129.204288] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c94841-94c5-4559-a569-e6cee0955828 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1129.231711] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181035MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1129.231852] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1129.232049] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1129.377900] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1129.378075] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 66fa7689-aea7-4b88-b63c-0754f5e99d51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1129.378206] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 3cbfca3b-863a-40d1-81ab-63794b8de97e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1129.378329] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 5c77964f-e902-489a-86c3-9c9d4dd304d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1129.378450] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 96feb18e-14ee-40cf-bd5d-89a4e773c797 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1129.378570] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 30f91210-0318-4912-808b-843c2cd04ea1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1129.378721] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 843278e1-7d76-4f50-8170-9e335d29326e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1129.378850] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance db3de804-63b7-4887-b752-282e70e0f20e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1129.378967] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8c919afe-37b6-47f0-b939-d9df5800d7ee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1129.379094] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 437b21d5-932d-4216-b7f7-17c6eab2665f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1129.392368] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1129.403193] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 2d427736-0a1d-4963-9380-6c8d47b39e1c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1129.411967] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 74d737f1-8d72-411f-a0f7-a4483ae6804f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1129.421840] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 45e9996a-dee3-4cf3-85ed-c972f27dd7b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1129.433288] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 3b4a01e3-9653-45ba-9bc5-f37443fe0f40 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1129.442532] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance b3812d61-be7e-4c30-b59a-1eb59d987954 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1129.452536] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 7658ef83-ea52-41b2-b636-7f4fc7d9deea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1129.461882] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance bdbaaad2-4343-4864-ba52-108b2bff51f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1129.470955] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance cf721d5b-0a1d-4fa0-a985-eae683e7309f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1129.481063] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 2100b556-11e5-4846-ab1d-5eff53343ef4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1129.490543] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 7b744243-c7e5-4253-9273-9d7f84772d96 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1129.500267] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 98f18180-bd1c-492d-9fbe-4bf306aca4b2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1129.510067] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 3c14fe11-5172-4611-acf8-c29746a5658e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1129.521149] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6d6a0e44-f9b4-4da4-948b-b05b86c93a3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1129.532343] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 13d49c4b-bc0e-4e9e-aecb-59fd2745e9c5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1129.541623] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 3fda5b6d-44b1-412a-9eff-0e8be3c725c1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1129.551803] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 2698cd39-5b23-48b8-ae60-48f7576c1546 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1129.561356] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 2b1b82aa-9f54-4829-98bf-011f6289a534 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1129.571033] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 46a76bbb-28ff-4b71-aa4e-f946ef586b64 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1129.582336] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance a51672b6-f918-4ba3-9c55-af2edb3ec693 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1129.591917] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 7a5c2a57-b28d-45e0-ab7b-5a649758b69b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1129.592208] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1129.592360] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1129.608106] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Refreshing inventories for resource provider 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}}
[ 1129.622586] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Updating ProviderTree inventory for provider 82311841-8ff3-4f49-9053-67c5a45ef771 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}}
[ 1129.622586] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Updating inventory in ProviderTree for provider 82311841-8ff3-4f49-9053-67c5a45ef771 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 1129.635056] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Refreshing aggregate associations for resource provider 82311841-8ff3-4f49-9053-67c5a45ef771, aggregates: None {{(pid=67015) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}}
[ 1129.652790] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Refreshing trait associations for resource provider 82311841-8ff3-4f49-9053-67c5a45ef771, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=67015) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}}
[ 1129.999580] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beb533f8-3b63-4e38-be88-2f2f4b3709de {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1130.007054] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6eaa81-3902-4ef6-b5d6-d75b9b23ec12 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1130.037368] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e962275-d62c-4b48-8b14-ad92d3b3f3ae {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1130.044423] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7613d045-5947-4106-8557-1f3a22e7482f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1130.057538] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1130.065868] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1130.082338] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1130.082534] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.850s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1131.067819] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1131.098496] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1131.098496] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}}
[ 1131.098496] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1131.115895] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1131.116507] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1131.116699] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1131.116833] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1131.116960] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1131.117096] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1131.117219] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1131.117338] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1131.117469] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1131.117605] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1131.117724] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}}
[ 1148.336252] env[67015]: WARNING oslo_vmware.rw_handles [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1148.336252] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1148.336252] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1148.336252] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 1148.336252] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1148.336252] env[67015]: ERROR oslo_vmware.rw_handles response.begin()
[ 1148.336252] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1148.336252] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 1148.336252] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1148.336252] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 1148.336252] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1148.336252] env[67015]: ERROR oslo_vmware.rw_handles
[ 1148.336963] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/8f753489-1e84-4808-9391-c5e4c9ad7ac7/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1148.339381] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1148.339655] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Copying Virtual Disk [datastore2] vmware_temp/8f753489-1e84-4808-9391-c5e4c9ad7ac7/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/8f753489-1e84-4808-9391-c5e4c9ad7ac7/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1148.339961] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-afd9955c-dc3a-45da-8fac-80a623d93591 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1148.348380] env[67015]: DEBUG oslo_vmware.api [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Waiting for the task: (returnval){
[ 1148.348380] env[67015]: value = "task-3114470"
[ 1148.348380] env[67015]: _type = "Task"
[ 1148.348380] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1148.356180] env[67015]: DEBUG oslo_vmware.api [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Task: {'id': task-3114470, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1148.859432] env[67015]: DEBUG oslo_vmware.exceptions [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Fault InvalidArgument not matched. {{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1148.859750] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1148.860325] env[67015]: ERROR nova.compute.manager [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1148.860325] env[67015]: Faults: ['InvalidArgument']
[ 1148.860325] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Traceback (most recent call last):
[ 1148.860325] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1148.860325] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] yield resources
[ 1148.860325] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1148.860325] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] self.driver.spawn(context, instance, image_meta,
[ 1148.860325] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1148.860325] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1148.860325] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1148.860325] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] self._fetch_image_if_missing(context, vi)
[ 1148.860325] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1148.860720] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] image_cache(vi, tmp_image_ds_loc)
[ 1148.860720] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1148.860720] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] vm_util.copy_virtual_disk(
[ 1148.860720] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1148.860720] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] session._wait_for_task(vmdk_copy_task)
[ 1148.860720] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1148.860720] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] return self.wait_for_task(task_ref)
[ 1148.860720] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1148.860720] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] return evt.wait()
[ 1148.860720] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1148.860720] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] result = hub.switch()
[ 1148.860720] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1148.860720] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] return self.greenlet.switch()
[ 1148.861103] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1148.861103] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] self.f(*self.args, **self.kw)
[ 1148.861103] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1148.861103] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] raise exceptions.translate_fault(task_info.error)
[ 1148.861103] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1148.861103] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Faults: ['InvalidArgument']
[ 1148.861103] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a]
[ 1148.861103] env[67015]: INFO nova.compute.manager [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Terminating instance
[ 1148.862405] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1148.862620] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1148.862888] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-87724cac-0272-446b-bc56-2d93f7083bc2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1148.865312] env[67015]: DEBUG nova.compute.manager [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1148.865502] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1148.866391] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85de9c38-0ae5-49c3-b999-4b8587b5607d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1148.872852] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1148.873079] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-acbadc8a-1948-453d-9844-6a638e4626b6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1148.875218] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1148.875394] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1148.876459] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0466330f-f149-45a5-a0a6-5b7cfe1eb7f7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1148.881118] env[67015]: DEBUG oslo_vmware.api [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Waiting for the task: (returnval){
[ 1148.881118] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52a1c81b-47be-25d1-bb23-525606382b40"
[ 1148.881118] env[67015]: _type = "Task"
[ 1148.881118] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1148.889940] env[67015]: DEBUG oslo_vmware.api [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52a1c81b-47be-25d1-bb23-525606382b40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1148.957804] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1148.958105] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1148.958320] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Deleting the datastore file [datastore2] aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1148.958645] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6b8a10a5-7443-4f8f-bcac-6048a8d4486b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1148.964722] env[67015]: DEBUG oslo_vmware.api [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Waiting for the task: (returnval){
[ 1148.964722] env[67015]: value = "task-3114472"
[ 1148.964722] env[67015]: _type = "Task"
[ 1148.964722] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1148.972650] env[67015]: DEBUG oslo_vmware.api [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Task: {'id': task-3114472, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1149.392349] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1149.392624] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Creating directory with path [datastore2] vmware_temp/0a17b68f-7c0c-40d6-ace6-ec2ccf02cf80/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1149.392864] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b170d17d-8f94-48bc-abc3-151df9e1064d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1149.403910] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Created directory with path [datastore2] vmware_temp/0a17b68f-7c0c-40d6-ace6-ec2ccf02cf80/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1149.404127] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Fetch image to [datastore2] vmware_temp/0a17b68f-7c0c-40d6-ace6-ec2ccf02cf80/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1149.404304] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/0a17b68f-7c0c-40d6-ace6-ec2ccf02cf80/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1149.405053] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11cbd180-9813-430e-8d43-62cea6ac515e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1149.412586] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45420d51-9e16-4c62-82c3-f455b7fa6511 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1149.421088] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceaafde3-a4b7-4452-b9bd-1ca4ae4b3d38 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1149.451009] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d51720-710b-413b-87bf-a42481cb5661 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1149.456439] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4f5b5423-1c54-4d71-bf32-39f00d7eccfd {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1149.472387] env[67015]: DEBUG oslo_vmware.api [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Task: {'id': task-3114472, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067731} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1149.472587] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1149.472765] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1149.472934] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1149.473126] env[67015]: INFO nova.compute.manager [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Took 0.61 seconds to destroy the instance on the hypervisor.
[ 1149.476230] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1149.478468] env[67015]: DEBUG nova.compute.claims [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1149.478637] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1149.478845] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1149.527827] env[67015]: DEBUG oslo_vmware.rw_handles [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0a17b68f-7c0c-40d6-ace6-ec2ccf02cf80/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1149.590393] env[67015]: DEBUG oslo_vmware.rw_handles [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1149.590393] env[67015]: DEBUG oslo_vmware.rw_handles [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0a17b68f-7c0c-40d6-ace6-ec2ccf02cf80/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1149.900477] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36c1312d-b0b4-49d6-be84-382b586c4ea2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1149.907792] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa4d15ef-bd67-4afb-8259-54a4ef6b716f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1149.937234] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e7568b-aee3-4a7d-be78-c58fcf96944d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1149.944367] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e477ffad-5afd-4f86-9265-c52439b6218a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1149.956886] env[67015]: DEBUG nova.compute.provider_tree [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1149.965656] env[67015]: DEBUG nova.scheduler.client.report [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1149.978861] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.500s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1149.979337] env[67015]: ERROR nova.compute.manager [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1149.979337] env[67015]: Faults: ['InvalidArgument']
[ 1149.979337] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Traceback (most recent call last):
[ 1149.979337] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1149.979337] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] self.driver.spawn(context, instance, image_meta,
[ 1149.979337] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1149.979337] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1149.979337] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1149.979337] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] self._fetch_image_if_missing(context, vi)
[ 1149.979337] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1149.979337] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] image_cache(vi, tmp_image_ds_loc)
[ 1149.979337] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1149.979741] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] vm_util.copy_virtual_disk(
[ 1149.979741] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1149.979741] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] session._wait_for_task(vmdk_copy_task)
[ 1149.979741] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1149.979741] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] return self.wait_for_task(task_ref)
[ 1149.979741] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1149.979741] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] return evt.wait()
[ 1149.979741] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1149.979741] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] result = hub.switch()
[ 1149.979741] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1149.979741] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] return self.greenlet.switch()
[ 1149.979741] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1149.979741] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] self.f(*self.args, **self.kw)
[ 1149.980139] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1149.980139] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] raise exceptions.translate_fault(task_info.error)
[ 1149.980139] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1149.980139] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Faults: ['InvalidArgument']
[ 1149.980139] env[67015]: ERROR nova.compute.manager [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a]
[ 1149.980139] env[67015]: DEBUG nova.compute.utils [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1149.981385] env[67015]: DEBUG nova.compute.manager [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Build of instance aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a was re-scheduled: A specified parameter was not correct: fileType
[ 1149.981385] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1149.981773] env[67015]: DEBUG nova.compute.manager [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1149.981961] env[67015]: DEBUG nova.compute.manager [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged.
{{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1149.982142] env[67015]: DEBUG nova.compute.manager [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1149.982308] env[67015]: DEBUG nova.network.neutron [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1150.520156] env[67015]: DEBUG nova.network.neutron [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.535162] env[67015]: INFO nova.compute.manager [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Took 0.55 seconds to deallocate network for instance. [ 1150.643677] env[67015]: INFO nova.scheduler.client.report [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Deleted allocations for instance aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a [ 1150.668495] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f20d09a4-4b9d-48c8-835e-3a7bf8170387 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Lock "aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 577.984s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.669637] env[67015]: DEBUG oslo_concurrency.lockutils [None req-eaf55957-7eee-425a-a3b4-2aa499d272d9 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Lock "aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 380.490s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1150.669860] env[67015]: DEBUG oslo_concurrency.lockutils [None req-eaf55957-7eee-425a-a3b4-2aa499d272d9 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Acquiring lock "aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1150.670101] env[67015]: DEBUG oslo_concurrency.lockutils [None req-eaf55957-7eee-425a-a3b4-2aa499d272d9 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Lock "aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1150.670284] env[67015]: DEBUG oslo_concurrency.lockutils [None req-eaf55957-7eee-425a-a3b4-2aa499d272d9 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Lock "aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.673689] env[67015]: INFO nova.compute.manager [None req-eaf55957-7eee-425a-a3b4-2aa499d272d9 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Terminating instance [ 1150.677413] env[67015]: DEBUG nova.compute.manager [None req-eaf55957-7eee-425a-a3b4-2aa499d272d9 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1150.677413] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-eaf55957-7eee-425a-a3b4-2aa499d272d9 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1150.677413] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dd7deb2d-062a-453d-b470-bb9b199f2560 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.686984] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9157134-d4e6-443d-9a93-0b05e43bbabf {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.697393] env[67015]: DEBUG nova.compute.manager [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1150.719622] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-eaf55957-7eee-425a-a3b4-2aa499d272d9 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a could not be found. [ 1150.719846] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-eaf55957-7eee-425a-a3b4-2aa499d272d9 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1150.720026] env[67015]: INFO nova.compute.manager [None req-eaf55957-7eee-425a-a3b4-2aa499d272d9 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Took 0.05 seconds to destroy the instance on the hypervisor. 
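The lock records above follow oslo.concurrency's named-lock pattern: each critical section takes a lock keyed on the instance UUID (with an "-events" suffix for the per-instance event dict) and logs how long it waited for and held the lock. A minimal sketch of that pattern, assuming a hypothetical UUID and a stand-in critical section (not Nova's actual event-dict code):

    import time

    from oslo_concurrency import lockutils

    INSTANCE_UUID = "00000000-0000-0000-0000-000000000000"  # hypothetical

    def clear_events_for_instance(instance_uuid):
        # lockutils.lock() returns a context manager; the name mirrors the
        # "<uuid>-events" locks in the log. The waited/held timings below
        # correspond to the "waited N.NNNs" / "held N.NNNs" fields logged
        # by lockutils' inner() wrapper.
        start = time.monotonic()
        with lockutils.lock(instance_uuid + "-events"):
            waited = time.monotonic() - start
            held_start = time.monotonic()
            # ... clear the per-instance event dict here ...
            held = time.monotonic() - held_start
        print("waited %.3fs, held %.3fs" % (waited, held))

    clear_events_for_instance(INSTANCE_UUID)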
[ 1150.720280] env[67015]: DEBUG oslo.service.loopingcall [None req-eaf55957-7eee-425a-a3b4-2aa499d272d9 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1150.720527] env[67015]: DEBUG nova.compute.manager [-] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1150.720632] env[67015]: DEBUG nova.network.neutron [-] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1150.748862] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1150.749168] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1150.750686] env[67015]: INFO nova.compute.claims [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1150.753931] env[67015]: DEBUG nova.network.neutron [-] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.762743] env[67015]: INFO nova.compute.manager [-] [instance: aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a] Took 0.04 seconds to deallocate network for instance. 
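The "Waiting for function ... _deallocate_network_with_retries to return" record above is oslo.service's looping-call machinery: a callable is invoked repeatedly until it signals completion by raising LoopingCallDone, and the caller blocks on the event returned by start(). A toy sketch of that mechanism; the callable and retry budget are illustrative, not Nova's actual network-deallocation retry logic:

    from oslo_service import loopingcall

    attempts = {"n": 0}

    def _deallocate_network_with_retries():
        # Stand-in for the deallocation attempt; pretend the third try works.
        attempts["n"] += 1
        if attempts["n"] >= 3:
            raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_network_with_retries)
    # start() schedules the polling; .wait() blocks until LoopingCallDone
    # is raised and returns its retvalue.
    result = timer.start(interval=0.1).wait()
    print("deallocated:", result)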
[ 1150.860992] env[67015]: DEBUG oslo_concurrency.lockutils [None req-eaf55957-7eee-425a-a3b4-2aa499d272d9 tempest-ImagesNegativeTestJSON-1468887496 tempest-ImagesNegativeTestJSON-1468887496-project-member] Lock "aa6e2bca-a3e3-4803-8120-bb8f3e6fae2a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.191s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.132701] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fb6e9c6-c4a4-42da-bdb6-9d06b1067f82 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.140397] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42a70e8-4e59-469b-92ba-c02c733bbf6b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.171050] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90f85835-a8d7-44eb-83e1-fdff42b71685 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.178037] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b8da4d-b112-4e32-9b9f-0daea176c5ce {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.191655] env[67015]: DEBUG nova.compute.provider_tree [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1151.200580] env[67015]: DEBUG nova.scheduler.client.report [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1151.214612] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.465s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.215161] env[67015]: DEBUG nova.compute.manager [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Start building networks asynchronously for instance. 
{{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1151.249380] env[67015]: DEBUG nova.compute.utils [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1151.250734] env[67015]: DEBUG nova.compute.manager [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1151.250886] env[67015]: DEBUG nova.network.neutron [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1151.260595] env[67015]: DEBUG nova.compute.manager [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1151.319671] env[67015]: DEBUG nova.policy [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '722d10d821734272ae04fec85476e66c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '751221110b8a4b3fbbf6d865127fe3ac', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 1151.325656] env[67015]: DEBUG nova.compute.manager [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Start spawning the instance on the hypervisor. 
{{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1151.350795] env[67015]: DEBUG nova.virt.hardware [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1151.351045] env[67015]: DEBUG nova.virt.hardware [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1151.351212] env[67015]: DEBUG nova.virt.hardware [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1151.351395] env[67015]: DEBUG nova.virt.hardware [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1151.351541] env[67015]: DEBUG nova.virt.hardware [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1151.351689] env[67015]: DEBUG nova.virt.hardware [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1151.351894] env[67015]: DEBUG nova.virt.hardware [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1151.352063] env[67015]: DEBUG nova.virt.hardware [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1151.352232] env[67015]: DEBUG 
nova.virt.hardware [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1151.352392] env[67015]: DEBUG nova.virt.hardware [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1151.352561] env[67015]: DEBUG nova.virt.hardware [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1151.353396] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba43729-18fd-46c1-b8f5-8fc897b1feca {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.361425] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f3d9a6e-ec9c-4f4e-be6c-69ec08874539 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.825519] env[67015]: DEBUG nova.network.neutron [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Successfully created port: 60c26a47-8d6a-4361-8acb-8b379370ceee {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1152.556061] env[67015]: DEBUG nova.network.neutron [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Successfully updated port: 60c26a47-8d6a-4361-8acb-8b379370ceee {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1152.569687] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Acquiring lock "refresh_cache-132f4b05-aaf0-4e58-9fcf-2490d4aed2e9" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1152.569687] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Acquired lock "refresh_cache-132f4b05-aaf0-4e58-9fcf-2490d4aed2e9" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.569687] env[67015]: DEBUG nova.network.neutron [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1152.614333] env[67015]: DEBUG nova.network.neutron [None 
req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1152.976859] env[67015]: DEBUG nova.compute.manager [req-965f598a-7cce-4cab-9594-d197806a62f6 req-622ac065-081f-4d98-aecc-91053a983c28 service nova] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Received event network-vif-plugged-60c26a47-8d6a-4361-8acb-8b379370ceee {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1152.977392] env[67015]: DEBUG oslo_concurrency.lockutils [req-965f598a-7cce-4cab-9594-d197806a62f6 req-622ac065-081f-4d98-aecc-91053a983c28 service nova] Acquiring lock "132f4b05-aaf0-4e58-9fcf-2490d4aed2e9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.977669] env[67015]: DEBUG oslo_concurrency.lockutils [req-965f598a-7cce-4cab-9594-d197806a62f6 req-622ac065-081f-4d98-aecc-91053a983c28 service nova] Lock "132f4b05-aaf0-4e58-9fcf-2490d4aed2e9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.977879] env[67015]: DEBUG oslo_concurrency.lockutils [req-965f598a-7cce-4cab-9594-d197806a62f6 req-622ac065-081f-4d98-aecc-91053a983c28 service nova] Lock "132f4b05-aaf0-4e58-9fcf-2490d4aed2e9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.978362] env[67015]: DEBUG nova.compute.manager [req-965f598a-7cce-4cab-9594-d197806a62f6 req-622ac065-081f-4d98-aecc-91053a983c28 service nova] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] No waiting events found dispatching network-vif-plugged-60c26a47-8d6a-4361-8acb-8b379370ceee {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1152.978569] env[67015]: WARNING nova.compute.manager [req-965f598a-7cce-4cab-9594-d197806a62f6 req-622ac065-081f-4d98-aecc-91053a983c28 service nova] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Received unexpected event network-vif-plugged-60c26a47-8d6a-4361-8acb-8b379370ceee for instance with vm_state building and task_state spawning. [ 1152.978739] env[67015]: DEBUG nova.compute.manager [req-965f598a-7cce-4cab-9594-d197806a62f6 req-622ac065-081f-4d98-aecc-91053a983c28 service nova] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Received event network-changed-60c26a47-8d6a-4361-8acb-8b379370ceee {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1152.978915] env[67015]: DEBUG nova.compute.manager [req-965f598a-7cce-4cab-9594-d197806a62f6 req-622ac065-081f-4d98-aecc-91053a983c28 service nova] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Refreshing instance network info cache due to event network-changed-60c26a47-8d6a-4361-8acb-8b379370ceee. 
{{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1152.979187] env[67015]: DEBUG oslo_concurrency.lockutils [req-965f598a-7cce-4cab-9594-d197806a62f6 req-622ac065-081f-4d98-aecc-91053a983c28 service nova] Acquiring lock "refresh_cache-132f4b05-aaf0-4e58-9fcf-2490d4aed2e9" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1153.124811] env[67015]: DEBUG nova.network.neutron [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Updating instance_info_cache with network_info: [{"id": "60c26a47-8d6a-4361-8acb-8b379370ceee", "address": "fa:16:3e:29:b1:59", "network": {"id": "032da4a0-96ab-4c05-a824-ccd22497c732", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1494508009-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "751221110b8a4b3fbbf6d865127fe3ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60c26a47-8d", "ovs_interfaceid": "60c26a47-8d6a-4361-8acb-8b379370ceee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1153.139119] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Releasing lock "refresh_cache-132f4b05-aaf0-4e58-9fcf-2490d4aed2e9" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1153.139517] env[67015]: DEBUG nova.compute.manager [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Instance network_info: |[{"id": "60c26a47-8d6a-4361-8acb-8b379370ceee", "address": "fa:16:3e:29:b1:59", "network": {"id": "032da4a0-96ab-4c05-a824-ccd22497c732", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1494508009-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "751221110b8a4b3fbbf6d865127fe3ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap60c26a47-8d", "ovs_interfaceid": "60c26a47-8d6a-4361-8acb-8b379370ceee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1153.139928] env[67015]: DEBUG oslo_concurrency.lockutils [req-965f598a-7cce-4cab-9594-d197806a62f6 req-622ac065-081f-4d98-aecc-91053a983c28 service nova] Acquired lock "refresh_cache-132f4b05-aaf0-4e58-9fcf-2490d4aed2e9" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.140215] env[67015]: DEBUG nova.network.neutron [req-965f598a-7cce-4cab-9594-d197806a62f6 req-622ac065-081f-4d98-aecc-91053a983c28 service nova] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Refreshing network info cache for port 60c26a47-8d6a-4361-8acb-8b379370ceee {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1153.141858] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:b1:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '001929c7-0dc4-4b73-a9f1-d672f8377985', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '60c26a47-8d6a-4361-8acb-8b379370ceee', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1153.153968] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Creating folder: Project (751221110b8a4b3fbbf6d865127fe3ac). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1153.158348] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f50ce653-a384-4e48-9615-afba67a53c87 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.170566] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Created folder: Project (751221110b8a4b3fbbf6d865127fe3ac) in parent group-v623108. [ 1153.170820] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Creating folder: Instances. Parent ref: group-v623178. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1153.171130] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-40c34906-5cb1-471b-9ca6-86db37a2fbe9 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.180649] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Created folder: Instances in parent group-v623178. 
[ 1153.180967] env[67015]: DEBUG oslo.service.loopingcall [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1153.181228] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1153.181494] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b3c01b4-21d5-4619-a713-f4e0bc19dcaf {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.211339] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1153.211339] env[67015]: value = "task-3114475" [ 1153.211339] env[67015]: _type = "Task" [ 1153.211339] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.222415] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114475, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.618129] env[67015]: DEBUG nova.network.neutron [req-965f598a-7cce-4cab-9594-d197806a62f6 req-622ac065-081f-4d98-aecc-91053a983c28 service nova] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Updated VIF entry in instance network info cache for port 60c26a47-8d6a-4361-8acb-8b379370ceee. {{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1153.618525] env[67015]: DEBUG nova.network.neutron [req-965f598a-7cce-4cab-9594-d197806a62f6 req-622ac065-081f-4d98-aecc-91053a983c28 service nova] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Updating instance_info_cache with network_info: [{"id": "60c26a47-8d6a-4361-8acb-8b379370ceee", "address": "fa:16:3e:29:b1:59", "network": {"id": "032da4a0-96ab-4c05-a824-ccd22497c732", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1494508009-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "751221110b8a4b3fbbf6d865127fe3ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60c26a47-8d", "ovs_interfaceid": "60c26a47-8d6a-4361-8acb-8b379370ceee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1153.628515] env[67015]: DEBUG oslo_concurrency.lockutils [req-965f598a-7cce-4cab-9594-d197806a62f6 req-622ac065-081f-4d98-aecc-91053a983c28 service nova] Releasing lock "refresh_cache-132f4b05-aaf0-4e58-9fcf-2490d4aed2e9" {{(pid=67015) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1153.722447] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114475, 'name': CreateVM_Task, 'duration_secs': 0.270151} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.722650] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1153.723303] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1153.723467] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.723783] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1153.724041] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbc3b180-9788-49ec-b2f4-8c7b61f08e67 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.729104] env[67015]: DEBUG oslo_vmware.api [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Waiting for the task: (returnval){ [ 1153.729104] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]528f84f6-3e2a-244a-8a18-ebb1ac593330" [ 1153.729104] env[67015]: _type = "Task" [ 1153.729104] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.736438] env[67015]: DEBUG oslo_vmware.api [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]528f84f6-3e2a-244a-8a18-ebb1ac593330, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.238710] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1154.239066] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1154.239186] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1159.046317] env[67015]: DEBUG oslo_concurrency.lockutils [None req-11269a0e-430f-48d3-ae8e-6b530f59efa3 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Acquiring lock "132f4b05-aaf0-4e58-9fcf-2490d4aed2e9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1162.662649] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquiring lock "c7d8b42d-6455-4489-9f62-8ab9f85e7f76" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1162.663079] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "c7d8b42d-6455-4489-9f62-8ab9f85e7f76" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1170.761664] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._sync_power_states {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1170.790667] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Getting list of instances from cluster (obj){ [ 1170.790667] env[67015]: value = "domain-c8" [ 1170.790667] env[67015]: _type = "ClusterComputeResource" [ 1170.790667] env[67015]: } {{(pid=67015) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1170.792134] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-edc5528c-df2e-4f46-95b5-1f94af92d4ee {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.811156] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Got total of 10 instances {{(pid=67015) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1170.811350] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 66fa7689-aea7-4b88-b63c-0754f5e99d51 {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1170.811607] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 3cbfca3b-863a-40d1-81ab-63794b8de97e {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1170.811697] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 5c77964f-e902-489a-86c3-9c9d4dd304d3 {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1170.811850] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 96feb18e-14ee-40cf-bd5d-89a4e773c797 {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1170.812026] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 30f91210-0318-4912-808b-843c2cd04ea1 {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1170.812167] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 843278e1-7d76-4f50-8170-9e335d29326e {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1170.812316] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid db3de804-63b7-4887-b752-282e70e0f20e {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1170.812464] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 8c919afe-37b6-47f0-b939-d9df5800d7ee {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1170.812610] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 437b21d5-932d-4216-b7f7-17c6eab2665f {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1170.813177] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9 {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1170.813570] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "66fa7689-aea7-4b88-b63c-0754f5e99d51" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.813815] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "3cbfca3b-863a-40d1-81ab-63794b8de97e" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.814042] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "5c77964f-e902-489a-86c3-9c9d4dd304d3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.814255] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "96feb18e-14ee-40cf-bd5d-89a4e773c797" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.814455] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "30f91210-0318-4912-808b-843c2cd04ea1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.814649] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "843278e1-7d76-4f50-8170-9e335d29326e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.814876] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "db3de804-63b7-4887-b752-282e70e0f20e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.815099] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "8c919afe-37b6-47f0-b939-d9df5800d7ee" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.815374] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "437b21d5-932d-4216-b7f7-17c6eab2665f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.816031] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "132f4b05-aaf0-4e58-9fcf-2490d4aed2e9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.476461] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Acquiring lock "1ef0af78-e94a-44f0-8b4c-adb1ab733ed8" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.476754] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Lock "1ef0af78-e94a-44f0-8b4c-adb1ab733ed8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1183.567721] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1184.509561] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1185.513646] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1187.128504] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c0de6377-9867-460e-8fa8-d1a6de030cb0 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Acquiring lock "daa757b3-3ad6-477f-a7e9-b81a863e9a8c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1187.129431] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c0de6377-9867-460e-8fa8-d1a6de030cb0 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Lock "daa757b3-3ad6-477f-a7e9-b81a863e9a8c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1187.514583] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1188.514976] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1188.515298] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1188.515400] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] 
CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1188.661047] env[67015]: DEBUG oslo_concurrency.lockutils [None req-39fdf1ce-72f4-4618-8dc5-a8f91ca7b493 tempest-ServerAddressesTestJSON-1912094255 tempest-ServerAddressesTestJSON-1912094255-project-member] Acquiring lock "01c3904e-b014-4f3a-8647-ba5fb786d960" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1188.661340] env[67015]: DEBUG oslo_concurrency.lockutils [None req-39fdf1ce-72f4-4618-8dc5-a8f91ca7b493 tempest-ServerAddressesTestJSON-1912094255 tempest-ServerAddressesTestJSON-1912094255-project-member] Lock "01c3904e-b014-4f3a-8647-ba5fb786d960" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1189.514191] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1189.514515] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1189.527049] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1189.527049] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1189.527049] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1189.527420] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1189.528386] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f25d7fa6-1883-465c-815a-d33d7d3370f4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.537234] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6630ce4-a5bd-4719-a116-5692b6ae310b {{(pid=67015) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.550977] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2206033c-5cb5-402b-a241-03c6440b991e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.557337] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fd8c866-2ba5-42fc-a6c7-9286a62f1923 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.586055] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180995MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1189.586055] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1189.586157] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1189.663773] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 66fa7689-aea7-4b88-b63c-0754f5e99d51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1189.663945] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 3cbfca3b-863a-40d1-81ab-63794b8de97e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1189.664092] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 5c77964f-e902-489a-86c3-9c9d4dd304d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1189.664219] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 96feb18e-14ee-40cf-bd5d-89a4e773c797 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1189.664341] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 30f91210-0318-4912-808b-843c2cd04ea1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1189.664459] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 843278e1-7d76-4f50-8170-9e335d29326e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1189.664577] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance db3de804-63b7-4887-b752-282e70e0f20e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1189.664691] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8c919afe-37b6-47f0-b939-d9df5800d7ee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1189.664803] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 437b21d5-932d-4216-b7f7-17c6eab2665f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1189.665014] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1189.676595] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance cf721d5b-0a1d-4fa0-a985-eae683e7309f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1189.688523] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 2100b556-11e5-4846-ab1d-5eff53343ef4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1189.698680] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 7b744243-c7e5-4253-9273-9d7f84772d96 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1189.708587] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 98f18180-bd1c-492d-9fbe-4bf306aca4b2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1189.718099] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 3c14fe11-5172-4611-acf8-c29746a5658e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1189.727907] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6d6a0e44-f9b4-4da4-948b-b05b86c93a3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1189.736806] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 13d49c4b-bc0e-4e9e-aecb-59fd2745e9c5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1189.747025] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 3fda5b6d-44b1-412a-9eff-0e8be3c725c1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1189.756702] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 2698cd39-5b23-48b8-ae60-48f7576c1546 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1189.765992] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 2b1b82aa-9f54-4829-98bf-011f6289a534 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1189.776282] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 46a76bbb-28ff-4b71-aa4e-f946ef586b64 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1189.785050] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance a51672b6-f918-4ba3-9c55-af2edb3ec693 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1189.793900] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 7a5c2a57-b28d-45e0-ab7b-5a649758b69b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1189.803130] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance c7d8b42d-6455-4489-9f62-8ab9f85e7f76 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1189.811972] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1189.821061] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance daa757b3-3ad6-477f-a7e9-b81a863e9a8c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1189.829905] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 01c3904e-b014-4f3a-8647-ba5fb786d960 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1189.830151] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1189.830300] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1190.178574] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7dfb1d9-f946-4212-876e-6dfe956d22c5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.186250] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb12fbc-c8b3-40e8-8021-db4d6740c648 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.216393] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d3acda6-d517-43a8-9fdf-137d54adde35 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.223658] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e952eedc-6cb2-404a-b49d-0c3e77cc7d2d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.236399] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1190.244802] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1190.261043] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1190.261272] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.675s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.261553] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1192.261991] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1192.261991] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1192.288148] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1192.288148] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1192.288148] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1192.288148] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1192.288395] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1192.288395] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1192.288460] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Skipping network cache update for instance because it is Building. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1192.288580] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1192.288695] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1192.288799] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1192.288921] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1198.353877] env[67015]: WARNING oslo_vmware.rw_handles [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1198.353877] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1198.353877] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1198.353877] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1198.353877] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1198.353877] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 1198.353877] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1198.353877] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1198.353877] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1198.353877] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1198.353877] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1198.353877] env[67015]: ERROR oslo_vmware.rw_handles [ 1198.354492] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/0a17b68f-7c0c-40d6-ace6-ec2ccf02cf80/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1198.356510] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 
tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1198.356790] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Copying Virtual Disk [datastore2] vmware_temp/0a17b68f-7c0c-40d6-ace6-ec2ccf02cf80/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/0a17b68f-7c0c-40d6-ace6-ec2ccf02cf80/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1198.357124] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a30d7451-8fa3-456a-914c-485e726bba34 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.365165] env[67015]: DEBUG oslo_vmware.api [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Waiting for the task: (returnval){ [ 1198.365165] env[67015]: value = "task-3114476" [ 1198.365165] env[67015]: _type = "Task" [ 1198.365165] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.373173] env[67015]: DEBUG oslo_vmware.api [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Task: {'id': task-3114476, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.876120] env[67015]: DEBUG oslo_vmware.exceptions [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Fault InvalidArgument not matched. 
{{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1198.876428] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1198.876999] env[67015]: ERROR nova.compute.manager [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1198.876999] env[67015]: Faults: ['InvalidArgument'] [ 1198.876999] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Traceback (most recent call last): [ 1198.876999] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1198.876999] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] yield resources [ 1198.876999] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1198.876999] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] self.driver.spawn(context, instance, image_meta, [ 1198.876999] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1198.876999] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1198.876999] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1198.876999] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] self._fetch_image_if_missing(context, vi) [ 1198.876999] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1198.877393] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] image_cache(vi, tmp_image_ds_loc) [ 1198.877393] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1198.877393] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] vm_util.copy_virtual_disk( [ 1198.877393] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1198.877393] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] session._wait_for_task(vmdk_copy_task) [ 1198.877393] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] 
File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1198.877393] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] return self.wait_for_task(task_ref) [ 1198.877393] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1198.877393] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] return evt.wait() [ 1198.877393] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1198.877393] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] result = hub.switch() [ 1198.877393] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1198.877393] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] return self.greenlet.switch() [ 1198.877755] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1198.877755] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] self.f(*self.args, **self.kw) [ 1198.877755] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1198.877755] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] raise exceptions.translate_fault(task_info.error) [ 1198.877755] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1198.877755] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Faults: ['InvalidArgument'] [ 1198.877755] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] [ 1198.877755] env[67015]: INFO nova.compute.manager [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Terminating instance [ 1198.878954] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1198.879181] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1198.879424] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-32fb34b0-8544-4b1b-9835-f40f18f19f3f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.881776] env[67015]: DEBUG nova.compute.manager [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1198.881973] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1198.882701] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b73332ec-b1ca-4347-afb6-3ca96eac0b0a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.889141] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1198.889380] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d825bed-ac6c-48d1-bb13-11cf91f16371 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.891587] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1198.891760] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1198.892824] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ab22caf-25df-4892-8c92-7df320827c4f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.897474] env[67015]: DEBUG oslo_vmware.api [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Waiting for the task: (returnval){ [ 1198.897474] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52284a76-e239-2314-093b-816bab578728" [ 1198.897474] env[67015]: _type = "Task" [ 1198.897474] env[67015]: } to complete. 
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.904518] env[67015]: DEBUG oslo_vmware.api [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52284a76-e239-2314-093b-816bab578728, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.962963] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1198.962963] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1198.962963] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Deleting the datastore file [datastore2] 66fa7689-aea7-4b88-b63c-0754f5e99d51 {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1198.962963] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3ee4087e-fa32-4e22-87e3-66d9e059b9bf {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.969910] env[67015]: DEBUG oslo_vmware.api [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Waiting for the task: (returnval){ [ 1198.969910] env[67015]: value = "task-3114478" [ 1198.969910] env[67015]: _type = "Task" [ 1198.969910] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.977177] env[67015]: DEBUG oslo_vmware.api [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Task: {'id': task-3114478, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.408031] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1199.408319] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Creating directory with path [datastore2] vmware_temp/33ed667d-373a-4213-802e-21dfdfbe5700/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1199.408540] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-be598aa3-921f-4d82-b83c-42ce53d41d86 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.419232] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Created directory with path [datastore2] vmware_temp/33ed667d-373a-4213-802e-21dfdfbe5700/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1199.419431] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Fetch image to [datastore2] vmware_temp/33ed667d-373a-4213-802e-21dfdfbe5700/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1199.419600] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/33ed667d-373a-4213-802e-21dfdfbe5700/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1199.420316] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3ef888c-f953-412e-a082-efe2f72ba10d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.428285] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e0070ef-d379-4938-972c-a2e2cdc4d598 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.436910] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44caee3f-cbd1-48ce-bfc0-b4400c1f51f6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.468277] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-550882bb-8b32-4a49-bbb6-fae3b59770eb {{(pid=67015) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.479568] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d958efe2-fc43-4255-9e47-6c2f6ec8775f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.481296] env[67015]: DEBUG oslo_vmware.api [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Task: {'id': task-3114478, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077099} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.481545] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1199.481726] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1199.481895] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1199.482091] env[67015]: INFO nova.compute.manager [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Took 0.60 seconds to destroy the instance on the hypervisor. 
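The destroy sequence just completed (UnregisterVM, DeleteDatastoreFile_Task, "Deleted the datastore file") uses the same submit-then-poll pattern that failed earlier for task-3114476: a vCenter task is created, _poll_task checks it until it reaches a terminal state, and a terminal error is translated into a VimFaultException such as "A specified parameter was not correct: fileType". A minimal self-contained sketch of that polling loop follows; VimFault, poll_task, and the fake task states are hypothetical stand-ins for illustration, not the real oslo_vmware API.

import time

class VimFault(Exception):
    # Hypothetical stand-in for oslo_vmware.exceptions.VimFaultException.
    def __init__(self, msg, fault_list):
        super().__init__(msg)
        self.fault_list = fault_list

def poll_task(get_task_info, interval=0.5):
    # Poll until the task reaches a terminal state, mirroring the
    # "progress is 0%" -> "completed successfully" sequences in the log.
    while True:
        info = get_task_info()
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            # Translate the vCenter fault into a Python exception,
            # as with Faults: ['InvalidArgument'] above.
            raise VimFault(info["error"]["msg"], info["error"]["faults"])
        time.sleep(interval)  # still queued or running: poll again

# Usage: a fake task that runs for two polls, then fails the way
# task-3114476 (CopyVirtualDisk_Task) did.
_states = iter([
    {"state": "running"},
    {"state": "running"},
    {"state": "error",
     "error": {"msg": "A specified parameter was not correct: fileType",
               "faults": ["InvalidArgument"]}},
])
try:
    poll_task(lambda: next(_states), interval=0.01)
except VimFault as exc:
    print("task failed:", exc, exc.fault_list)

The production loop does not sleep directly: per the traceback above it runs inside oslo_vmware's looping call on eventlet (hub.switch / greenlet.switch), so a waiting request handler yields to other greenthreads between polls.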
[ 1199.484214] env[67015]: DEBUG nova.compute.claims [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1199.484357] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1199.484568] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.511562] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1199.717237] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1199.719008] env[67015]: ERROR nova.compute.manager [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982. 
[ 1199.719008] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Traceback (most recent call last): [ 1199.719008] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1199.719008] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1199.719008] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1199.719008] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] result = getattr(controller, method)(*args, **kwargs) [ 1199.719008] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1199.719008] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] return self._get(image_id) [ 1199.719008] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1199.719008] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1199.719008] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1199.719361] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] resp, body = self.http_client.get(url, headers=header) [ 1199.719361] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1199.719361] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] return self.request(url, 'GET', **kwargs) [ 1199.719361] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1199.719361] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] return self._handle_response(resp) [ 1199.719361] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1199.719361] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] raise exc.from_response(resp, resp.content) [ 1199.719361] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1199.719361] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] [ 1199.719361] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] During handling of the above exception, another exception occurred: [ 1199.719361] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] [ 1199.719361] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Traceback (most recent call last): [ 1199.719734] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1199.719734] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] yield resources [ 1199.719734] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1199.719734] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] self.driver.spawn(context, instance, image_meta, [ 1199.719734] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1199.719734] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1199.719734] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1199.719734] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] self._fetch_image_if_missing(context, vi) [ 1199.719734] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1199.719734] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] image_fetch(context, vi, tmp_image_ds_loc) [ 1199.719734] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1199.719734] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] images.fetch_image( [ 1199.719734] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1199.720133] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] metadata = IMAGE_API.get(context, image_ref) [ 1199.720133] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1199.720133] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] return session.show(context, image_id, [ 1199.720133] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1199.720133] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] _reraise_translated_image_exception(image_id) [ 1199.720133] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1199.720133] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] raise new_exc.with_traceback(exc_trace) [ 1199.720133] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1199.720133] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1199.720133] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1199.720133] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] result = getattr(controller, method)(*args, **kwargs) [ 1199.720133] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1199.720133] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] return self._get(image_id) [ 1199.720481] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1199.720481] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1199.720481] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1199.720481] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] resp, body = self.http_client.get(url, headers=header) [ 1199.720481] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1199.720481] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] return self.request(url, 'GET', **kwargs) [ 1199.720481] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1199.720481] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] return self._handle_response(resp) [ 1199.720481] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1199.720481] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] raise exc.from_response(resp, resp.content) [ 1199.720481] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] nova.exception.ImageNotAuthorized: Not authorized for image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982. 
[ 1199.720481] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] [ 1199.720796] env[67015]: INFO nova.compute.manager [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Terminating instance [ 1199.720939] env[67015]: DEBUG oslo_concurrency.lockutils [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.721409] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1199.724067] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Acquiring lock "refresh_cache-3cbfca3b-863a-40d1-81ab-63794b8de97e" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1199.724232] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Acquired lock "refresh_cache-3cbfca3b-863a-40d1-81ab-63794b8de97e" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.724461] env[67015]: DEBUG nova.network.neutron [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1199.725444] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c579970-3e68-403f-8290-ee99345f69eb {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.736183] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1199.736375] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1199.737672] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6cbee38-8873-4e49-befc-cb46eb316455 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.744613] env[67015]: DEBUG oslo_vmware.api [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Waiting for the task: (returnval){ [ 1199.744613] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]521fc1ff-949a-9d7d-933e-0c4f66510048" [ 1199.744613] env[67015]: _type = "Task" [ 1199.744613] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.752576] env[67015]: DEBUG oslo_vmware.api [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]521fc1ff-949a-9d7d-933e-0c4f66510048, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.799824] env[67015]: DEBUG nova.network.neutron [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1199.844174] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a59ef8c-8135-40d1-b171-4120188b5391 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.849934] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d7d444-5842-49d0-8e42-83bb62a2fc80 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.881464] env[67015]: DEBUG nova.network.neutron [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.883097] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03df33a6-4169-4d2f-8450-d254ac0a77a3 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.890279] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ad8716-429a-4d82-8456-3b1f2216ebae {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.894717] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Releasing lock "refresh_cache-3cbfca3b-863a-40d1-81ab-63794b8de97e" {{(pid=67015) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1199.895135] env[67015]: DEBUG nova.compute.manager [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1199.896173] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1199.897020] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd3feb85-3383-4cd2-bbab-6d19b3972380 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.906765] env[67015]: DEBUG nova.compute.provider_tree [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1199.911543] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1199.911760] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7c5f45cf-fcde-4434-ad04-12b5f1663682 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.918407] env[67015]: DEBUG nova.scheduler.client.report [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1199.933213] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.448s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1199.933884] env[67015]: ERROR nova.compute.manager [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] 
[instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1199.933884] env[67015]: Faults: ['InvalidArgument'] [ 1199.933884] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Traceback (most recent call last): [ 1199.933884] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1199.933884] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] self.driver.spawn(context, instance, image_meta, [ 1199.933884] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1199.933884] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1199.933884] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1199.933884] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] self._fetch_image_if_missing(context, vi) [ 1199.933884] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1199.933884] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] image_cache(vi, tmp_image_ds_loc) [ 1199.933884] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1199.934260] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] vm_util.copy_virtual_disk( [ 1199.934260] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1199.934260] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] session._wait_for_task(vmdk_copy_task) [ 1199.934260] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1199.934260] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] return self.wait_for_task(task_ref) [ 1199.934260] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1199.934260] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] return evt.wait() [ 1199.934260] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1199.934260] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] result = hub.switch() [ 1199.934260] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1199.934260] 
env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] return self.greenlet.switch() [ 1199.934260] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1199.934260] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] self.f(*self.args, **self.kw) [ 1199.934598] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1199.934598] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] raise exceptions.translate_fault(task_info.error) [ 1199.934598] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1199.934598] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Faults: ['InvalidArgument'] [ 1199.934598] env[67015]: ERROR nova.compute.manager [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] [ 1199.934740] env[67015]: DEBUG nova.compute.utils [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1199.937521] env[67015]: DEBUG nova.compute.manager [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Build of instance 66fa7689-aea7-4b88-b63c-0754f5e99d51 was re-scheduled: A specified parameter was not correct: fileType [ 1199.937521] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1199.937905] env[67015]: DEBUG nova.compute.manager [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1199.938098] env[67015]: DEBUG nova.compute.manager [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1199.938281] env[67015]: DEBUG nova.compute.manager [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1199.938448] env[67015]: DEBUG nova.network.neutron [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1199.940125] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1199.940324] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1199.940502] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Deleting the datastore file [datastore2] 3cbfca3b-863a-40d1-81ab-63794b8de97e {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1199.940971] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2e26342a-e20d-47e4-9249-27139397f648 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.947373] env[67015]: DEBUG oslo_vmware.api [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Waiting for the task: (returnval){ [ 1199.947373] env[67015]: value = "task-3114480" [ 1199.947373] env[67015]: _type = "Task" [ 1199.947373] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.954740] env[67015]: DEBUG oslo_vmware.api [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Task: {'id': task-3114480, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.255052] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1200.255343] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Creating directory with path [datastore2] vmware_temp/ca05b5ae-4f3c-44e1-8b1e-0760281f5db9/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1200.255576] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-89ba17d3-6307-4bc8-9fce-9b5feb800259 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.266462] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Created directory with path [datastore2] vmware_temp/ca05b5ae-4f3c-44e1-8b1e-0760281f5db9/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1200.266649] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Fetch image to [datastore2] vmware_temp/ca05b5ae-4f3c-44e1-8b1e-0760281f5db9/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1200.266820] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/ca05b5ae-4f3c-44e1-8b1e-0760281f5db9/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1200.267605] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0fabdd6-f454-4399-8d1f-11b602a09d25 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.274009] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0402c8d6-1cd3-48f4-8e19-3a062532b648 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.284342] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37518fab-4c09-4d5b-a9d8-1ce1b1d21e4a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.315322] env[67015]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94423f00-82b9-43ad-bcde-c6816a6f98c8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.321266] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d3f6b2c2-5f81-4e6b-9987-ef0f9960d566 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.344374] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1200.407973] env[67015]: DEBUG nova.network.neutron [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1200.410869] env[67015]: DEBUG oslo_vmware.rw_handles [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ca05b5ae-4f3c-44e1-8b1e-0760281f5db9/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1200.487064] env[67015]: INFO nova.compute.manager [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Took 0.55 seconds to deallocate network for instance. [ 1200.500013] env[67015]: DEBUG oslo_vmware.rw_handles [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1200.500013] env[67015]: DEBUG oslo_vmware.rw_handles [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ca05b5ae-4f3c-44e1-8b1e-0760281f5db9/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1200.506317] env[67015]: DEBUG oslo_vmware.api [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Task: {'id': task-3114480, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.038944} completed successfully. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.506680] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1200.506910] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1200.507194] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1200.507463] env[67015]: INFO nova.compute.manager [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1200.507988] env[67015]: DEBUG oslo.service.loopingcall [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1200.507988] env[67015]: DEBUG nova.compute.manager [-] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Skipping network deallocation for instance since networking was not requested.
{{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1200.510141] env[67015]: DEBUG nova.compute.claims [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1200.510332] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1200.511520] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1200.586593] env[67015]: INFO nova.scheduler.client.report [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Deleted allocations for instance 66fa7689-aea7-4b88-b63c-0754f5e99d51 [ 1200.607786] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7dc6944-48b9-4c8b-a26e-2196a1348d00 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Lock "66fa7689-aea7-4b88-b63c-0754f5e99d51" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 617.198s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.612019] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3ae69a9a-a397-47a3-b787-5d7c54f3c624 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Lock "66fa7689-aea7-4b88-b63c-0754f5e99d51" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 419.392s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1200.612019] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3ae69a9a-a397-47a3-b787-5d7c54f3c624 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Acquiring lock "66fa7689-aea7-4b88-b63c-0754f5e99d51-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1200.612019] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3ae69a9a-a397-47a3-b787-5d7c54f3c624 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Lock "66fa7689-aea7-4b88-b63c-0754f5e99d51-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1200.612326] env[67015]: DEBUG
oslo_concurrency.lockutils [None req-3ae69a9a-a397-47a3-b787-5d7c54f3c624 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Lock "66fa7689-aea7-4b88-b63c-0754f5e99d51-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.612326] env[67015]: INFO nova.compute.manager [None req-3ae69a9a-a397-47a3-b787-5d7c54f3c624 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Terminating instance [ 1200.615638] env[67015]: DEBUG nova.compute.manager [None req-3ae69a9a-a397-47a3-b787-5d7c54f3c624 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1200.615869] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-3ae69a9a-a397-47a3-b787-5d7c54f3c624 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1200.616157] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9fcd9c92-9e55-4879-b6bd-2f886d23032a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.625705] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-850c08ac-ffff-420c-9f05-400bf5651fc2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.637103] env[67015]: DEBUG nova.compute.manager [None req-6fc1e8b9-212f-4cd8-90c1-aa84c166ad2c tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 2d427736-0a1d-4963-9380-6c8d47b39e1c] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1200.660793] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-3ae69a9a-a397-47a3-b787-5d7c54f3c624 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 66fa7689-aea7-4b88-b63c-0754f5e99d51 could not be found. [ 1200.661022] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-3ae69a9a-a397-47a3-b787-5d7c54f3c624 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1200.661204] env[67015]: INFO nova.compute.manager [None req-3ae69a9a-a397-47a3-b787-5d7c54f3c624 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Took 0.05 seconds to destroy the instance on the hypervisor.
[ 1200.661447] env[67015]: DEBUG oslo.service.loopingcall [None req-3ae69a9a-a397-47a3-b787-5d7c54f3c624 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1200.664052] env[67015]: DEBUG nova.compute.manager [-] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1200.664152] env[67015]: DEBUG nova.network.neutron [-] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1200.666945] env[67015]: DEBUG nova.compute.manager [None req-6fc1e8b9-212f-4cd8-90c1-aa84c166ad2c tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 2d427736-0a1d-4963-9380-6c8d47b39e1c] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1200.693975] env[67015]: DEBUG oslo_concurrency.lockutils [None req-6fc1e8b9-212f-4cd8-90c1-aa84c166ad2c tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "2d427736-0a1d-4963-9380-6c8d47b39e1c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 235.794s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.699287] env[67015]: DEBUG nova.network.neutron [-] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1200.704343] env[67015]: DEBUG nova.compute.manager [None req-d74084d1-ae71-47a7-8f70-63952ca0ea3f tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 74d737f1-8d72-411f-a0f7-a4483ae6804f] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1200.707188] env[67015]: INFO nova.compute.manager [-] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] Took 0.04 seconds to deallocate network for instance. [ 1200.734934] env[67015]: DEBUG nova.compute.manager [None req-d74084d1-ae71-47a7-8f70-63952ca0ea3f tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] [instance: 74d737f1-8d72-411f-a0f7-a4483ae6804f] Instance disappeared before build.
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1200.767783] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d74084d1-ae71-47a7-8f70-63952ca0ea3f tempest-MigrationsAdminTest-1264630040 tempest-MigrationsAdminTest-1264630040-project-member] Lock "74d737f1-8d72-411f-a0f7-a4483ae6804f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 235.283s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.777791] env[67015]: DEBUG nova.compute.manager [None req-f68fa909-1400-43ff-aa2a-60d6b49f5d70 tempest-InstanceActionsNegativeTestJSON-7259337 tempest-InstanceActionsNegativeTestJSON-7259337-project-member] [instance: 45e9996a-dee3-4cf3-85ed-c972f27dd7b8] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1200.814114] env[67015]: DEBUG nova.compute.manager [None req-f68fa909-1400-43ff-aa2a-60d6b49f5d70 tempest-InstanceActionsNegativeTestJSON-7259337 tempest-InstanceActionsNegativeTestJSON-7259337-project-member] [instance: 45e9996a-dee3-4cf3-85ed-c972f27dd7b8] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1200.829807] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3ae69a9a-a397-47a3-b787-5d7c54f3c624 tempest-ServersWithSpecificFlavorTestJSON-2003116898 tempest-ServersWithSpecificFlavorTestJSON-2003116898-project-member] Lock "66fa7689-aea7-4b88-b63c-0754f5e99d51" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.221s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.831034] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "66fa7689-aea7-4b88-b63c-0754f5e99d51" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 30.017s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1200.831133] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 66fa7689-aea7-4b88-b63c-0754f5e99d51] During sync_power_state the instance has a pending task (deleting). Skip.
[ 1200.831251] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "66fa7689-aea7-4b88-b63c-0754f5e99d51" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.838397] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f68fa909-1400-43ff-aa2a-60d6b49f5d70 tempest-InstanceActionsNegativeTestJSON-7259337 tempest-InstanceActionsNegativeTestJSON-7259337-project-member] Lock "45e9996a-dee3-4cf3-85ed-c972f27dd7b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 228.114s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.848068] env[67015]: DEBUG nova.compute.manager [None req-a5c07553-15e3-4471-b4d4-e10502c6a75a tempest-ServersAdminTestJSON-1171030578 tempest-ServersAdminTestJSON-1171030578-project-member] [instance: 3b4a01e3-9653-45ba-9bc5-f37443fe0f40] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1200.893612] env[67015]: DEBUG nova.compute.manager [None req-a5c07553-15e3-4471-b4d4-e10502c6a75a tempest-ServersAdminTestJSON-1171030578 tempest-ServersAdminTestJSON-1171030578-project-member] [instance: 3b4a01e3-9653-45ba-9bc5-f37443fe0f40] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1200.915700] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5c07553-15e3-4471-b4d4-e10502c6a75a tempest-ServersAdminTestJSON-1171030578 tempest-ServersAdminTestJSON-1171030578-project-member] Lock "3b4a01e3-9653-45ba-9bc5-f37443fe0f40" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 225.810s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.918739] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d889133b-430e-4c32-9123-f0f1b538e785 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.926741] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1764b840-5589-4f39-8c05-559e367d7f94 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.930130] env[67015]: DEBUG nova.compute.manager [None req-efe5f90f-e0dd-4a42-b586-f7fd89e387ab tempest-ServersAdminTestJSON-1171030578 tempest-ServersAdminTestJSON-1171030578-project-member] [instance: b3812d61-be7e-4c30-b59a-1eb59d987954] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1200.960151] env[67015]: DEBUG nova.compute.manager [None req-efe5f90f-e0dd-4a42-b586-f7fd89e387ab tempest-ServersAdminTestJSON-1171030578 tempest-ServersAdminTestJSON-1171030578-project-member] [instance: b3812d61-be7e-4c30-b59a-1eb59d987954] Instance disappeared before build.
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1200.961519] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62b019f-74bb-4a61-84e4-335264775eae {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.969103] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3be301e-7e42-4e30-bbe3-6be0d3436bb3 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.984747] env[67015]: DEBUG nova.compute.provider_tree [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1200.986517] env[67015]: DEBUG oslo_concurrency.lockutils [None req-efe5f90f-e0dd-4a42-b586-f7fd89e387ab tempest-ServersAdminTestJSON-1171030578 tempest-ServersAdminTestJSON-1171030578-project-member] Lock "b3812d61-be7e-4c30-b59a-1eb59d987954" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 225.341s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.992605] env[67015]: DEBUG nova.scheduler.client.report [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1200.995911] env[67015]: DEBUG nova.compute.manager [None req-f1b4cb23-b497-4f35-b58e-211e36500b99 tempest-ImagesOneServerNegativeTestJSON-971646581 tempest-ImagesOneServerNegativeTestJSON-971646581-project-member] [instance: 7658ef83-ea52-41b2-b636-7f4fc7d9deea] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1201.009097] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.498s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.010086] env[67015]: ERROR nova.compute.manager [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.
[ 1201.010086] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Traceback (most recent call last): [ 1201.010086] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1201.010086] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1201.010086] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1201.010086] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] result = getattr(controller, method)(*args, **kwargs) [ 1201.010086] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1201.010086] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] return self._get(image_id) [ 1201.010086] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1201.010086] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1201.010086] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1201.010565] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] resp, body = self.http_client.get(url, headers=header) [ 1201.010565] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1201.010565] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] return self.request(url, 'GET', **kwargs) [ 1201.010565] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1201.010565] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] return self._handle_response(resp) [ 1201.010565] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1201.010565] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] raise exc.from_response(resp, resp.content) [ 1201.010565] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1201.010565] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] [ 1201.010565] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] During handling of the above exception, another exception occurred: [ 1201.010565] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] [ 1201.010565] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Traceback (most recent call last): [ 1201.010938] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1201.010938] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] self.driver.spawn(context, instance, image_meta, [ 1201.010938] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1201.010938] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1201.010938] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1201.010938] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] self._fetch_image_if_missing(context, vi) [ 1201.010938] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1201.010938] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] image_fetch(context, vi, tmp_image_ds_loc) [ 1201.010938] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1201.010938] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] images.fetch_image( [ 1201.010938] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1201.010938] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] metadata = IMAGE_API.get(context, image_ref) [ 1201.010938] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1201.012247] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] return session.show(context, image_id, [ 1201.012247] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1201.012247] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] _reraise_translated_image_exception(image_id) [ 1201.012247] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1201.012247] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] raise new_exc.with_traceback(exc_trace) [ 1201.012247] env[67015]: ERROR nova.compute.manager [instance: 
3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1201.012247] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1201.012247] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1201.012247] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] result = getattr(controller, method)(*args, **kwargs) [ 1201.012247] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1201.012247] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] return self._get(image_id) [ 1201.012247] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1201.012247] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1201.012719] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1201.012719] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] resp, body = self.http_client.get(url, headers=header) [ 1201.012719] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1201.012719] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] return self.request(url, 'GET', **kwargs) [ 1201.012719] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1201.012719] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] return self._handle_response(resp) [ 1201.012719] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1201.012719] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] raise exc.from_response(resp, resp.content) [ 1201.012719] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] nova.exception.ImageNotAuthorized: Not authorized for image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982. [ 1201.012719] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] [ 1201.012719] env[67015]: DEBUG nova.compute.utils [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Not authorized for image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982. 
{{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1201.013080] env[67015]: DEBUG nova.compute.manager [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Build of instance 3cbfca3b-863a-40d1-81ab-63794b8de97e was re-scheduled: Not authorized for image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1201.013080] env[67015]: DEBUG nova.compute.manager [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1201.013080] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Acquiring lock "refresh_cache-3cbfca3b-863a-40d1-81ab-63794b8de97e" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1201.013080] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Acquired lock "refresh_cache-3cbfca3b-863a-40d1-81ab-63794b8de97e" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1201.013249] env[67015]: DEBUG nova.network.neutron [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1201.019886] env[67015]: DEBUG nova.compute.manager [None req-f1b4cb23-b497-4f35-b58e-211e36500b99 tempest-ImagesOneServerNegativeTestJSON-971646581 tempest-ImagesOneServerNegativeTestJSON-971646581-project-member] [instance: 7658ef83-ea52-41b2-b636-7f4fc7d9deea] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1201.039246] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f1b4cb23-b497-4f35-b58e-211e36500b99 tempest-ImagesOneServerNegativeTestJSON-971646581 tempest-ImagesOneServerNegativeTestJSON-971646581-project-member] Lock "7658ef83-ea52-41b2-b636-7f4fc7d9deea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 216.234s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.048642] env[67015]: DEBUG nova.compute.manager [None req-e0d57ef6-1d44-4420-851e-df272a254979 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: bdbaaad2-4343-4864-ba52-108b2bff51f1] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1201.054420] env[67015]: DEBUG nova.network.neutron [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Instance cache missing network info. 
{{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1201.073061] env[67015]: DEBUG nova.compute.manager [None req-e0d57ef6-1d44-4420-851e-df272a254979 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: bdbaaad2-4343-4864-ba52-108b2bff51f1] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1201.092504] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e0d57ef6-1d44-4420-851e-df272a254979 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Lock "bdbaaad2-4343-4864-ba52-108b2bff51f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 211.447s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.101134] env[67015]: DEBUG nova.compute.manager [None req-dc05981c-ced1-4edd-bc43-df9933b064d7 tempest-ServersNegativeTestMultiTenantJSON-331051477 tempest-ServersNegativeTestMultiTenantJSON-331051477-project-member] [instance: cf721d5b-0a1d-4fa0-a985-eae683e7309f] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1201.126276] env[67015]: DEBUG nova.compute.manager [None req-dc05981c-ced1-4edd-bc43-df9933b064d7 tempest-ServersNegativeTestMultiTenantJSON-331051477 tempest-ServersNegativeTestMultiTenantJSON-331051477-project-member] [instance: cf721d5b-0a1d-4fa0-a985-eae683e7309f] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1201.147784] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc05981c-ced1-4edd-bc43-df9933b064d7 tempest-ServersNegativeTestMultiTenantJSON-331051477 tempest-ServersNegativeTestMultiTenantJSON-331051477-project-member] Lock "cf721d5b-0a1d-4fa0-a985-eae683e7309f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 205.434s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.156573] env[67015]: DEBUG nova.compute.manager [None req-bb000d50-e079-4e0b-a9dd-54be9706a9bf tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 2100b556-11e5-4846-ab1d-5eff53343ef4] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1201.180567] env[67015]: DEBUG nova.compute.manager [None req-bb000d50-e079-4e0b-a9dd-54be9706a9bf tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 2100b556-11e5-4846-ab1d-5eff53343ef4] Instance disappeared before build. 
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1201.194490] env[67015]: DEBUG nova.network.neutron [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.205846] env[67015]: DEBUG oslo_concurrency.lockutils [None req-bb000d50-e079-4e0b-a9dd-54be9706a9bf tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "2100b556-11e5-4846-ab1d-5eff53343ef4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 197.518s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.208670] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Releasing lock "refresh_cache-3cbfca3b-863a-40d1-81ab-63794b8de97e" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1201.208996] env[67015]: DEBUG nova.compute.manager [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1201.209091] env[67015]: DEBUG nova.compute.manager [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Skipping network deallocation for instance since networking was not requested. {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1201.216172] env[67015]: DEBUG nova.compute.manager [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Starting instance... 
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1201.266046] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.266046] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.267272] env[67015]: INFO nova.compute.claims [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1201.300977] env[67015]: INFO nova.scheduler.client.report [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Deleted allocations for instance 3cbfca3b-863a-40d1-81ab-63794b8de97e [ 1201.317163] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c9f216a1-f9de-4d62-a1ef-4ece74d22dbe tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Lock "3cbfca3b-863a-40d1-81ab-63794b8de97e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 616.614s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.318190] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7a26bfe-85a6-48b4-bdac-bdc1485a7944 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Lock "3cbfca3b-863a-40d1-81ab-63794b8de97e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 409.854s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.318451] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7a26bfe-85a6-48b4-bdac-bdc1485a7944 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Acquiring lock "3cbfca3b-863a-40d1-81ab-63794b8de97e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.318665] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7a26bfe-85a6-48b4-bdac-bdc1485a7944 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Lock "3cbfca3b-863a-40d1-81ab-63794b8de97e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.318837] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7a26bfe-85a6-48b4-bdac-bdc1485a7944 tempest-ServerShowV247Test-2049672408 
tempest-ServerShowV247Test-2049672408-project-member] Lock "3cbfca3b-863a-40d1-81ab-63794b8de97e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.320771] env[67015]: INFO nova.compute.manager [None req-b7a26bfe-85a6-48b4-bdac-bdc1485a7944 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Terminating instance [ 1201.322373] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7a26bfe-85a6-48b4-bdac-bdc1485a7944 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Acquiring lock "refresh_cache-3cbfca3b-863a-40d1-81ab-63794b8de97e" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1201.322527] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7a26bfe-85a6-48b4-bdac-bdc1485a7944 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Acquired lock "refresh_cache-3cbfca3b-863a-40d1-81ab-63794b8de97e" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1201.322692] env[67015]: DEBUG nova.network.neutron [None req-b7a26bfe-85a6-48b4-bdac-bdc1485a7944 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1201.326742] env[67015]: DEBUG nova.compute.manager [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1201.364374] env[67015]: DEBUG nova.network.neutron [None req-b7a26bfe-85a6-48b4-bdac-bdc1485a7944 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Instance cache missing network info. 
{{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1201.382749] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.491398] env[67015]: DEBUG nova.network.neutron [None req-b7a26bfe-85a6-48b4-bdac-bdc1485a7944 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.503135] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7a26bfe-85a6-48b4-bdac-bdc1485a7944 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Releasing lock "refresh_cache-3cbfca3b-863a-40d1-81ab-63794b8de97e" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1201.503553] env[67015]: DEBUG nova.compute.manager [None req-b7a26bfe-85a6-48b4-bdac-bdc1485a7944 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1201.503744] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-b7a26bfe-85a6-48b4-bdac-bdc1485a7944 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1201.504287] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a14df6e2-e312-48b3-b160-86972fd32546 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.514979] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4592f74-f11a-4903-9336-84892dc4e835 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.550268] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-b7a26bfe-85a6-48b4-bdac-bdc1485a7944 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3cbfca3b-863a-40d1-81ab-63794b8de97e could not be found. [ 1201.550550] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-b7a26bfe-85a6-48b4-bdac-bdc1485a7944 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1201.551447] env[67015]: INFO nova.compute.manager [None req-b7a26bfe-85a6-48b4-bdac-bdc1485a7944 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Took 0.05 seconds to destroy the instance on the hypervisor. 
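Note on the vmwareapi records just above: the backend lookup raised InstanceNotFound, yet vmops logged "Instance destroyed" and the delete kept going, because this instance never reached vCenter (its image fetch had failed with the 401 earlier) and a delete must still succeed when the backend has nothing to remove. A minimal sketch of that tolerate-missing-VM pattern; FakeSession and its method names are illustrative stand-ins, not Nova's or oslo.vmware's API:

    import logging

    LOG = logging.getLogger(__name__)

    class InstanceNotFound(Exception):
        pass

    class FakeSession:
        """Stand-in for the vCenter session; real lookups use the vSphere API."""
        def find_by_uuid(self, uuid):
            return None          # VM was never created: the image fetch failed

        def destroy_vm(self, vm_ref):
            pass

    def destroy(session, instance_uuid):
        try:
            vm_ref = session.find_by_uuid(instance_uuid)
            if vm_ref is None:
                raise InstanceNotFound(instance_uuid)
            session.destroy_vm(vm_ref)
        except InstanceNotFound:
            # Deletion must succeed even when the backend has nothing to
            # delete, so the missing VM is logged and swallowed.
            LOG.warning("Instance does not exist on backend: %s", instance_uuid)
        LOG.debug("Instance destroyed")

    destroy(FakeSession(), "3cbfca3b-863a-40d1-81ab-63794b8de97e")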
[ 1201.551754] env[67015]: DEBUG oslo.service.loopingcall [None req-b7a26bfe-85a6-48b4-bdac-bdc1485a7944 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1201.554531] env[67015]: DEBUG nova.compute.manager [-] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1201.554639] env[67015]: DEBUG nova.network.neutron [-] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1201.628683] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b9489fd-f94c-4842-8812-b55ba511c8e6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.637244] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f69d7bbf-7eec-4d86-8d96-c38a98626e54 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.670530] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e6f190-e1bb-490e-bd5c-a3fdb284f2b8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.677626] env[67015]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=67015) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1201.677817] env[67015]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1201.679639] env[67015]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1201.679639] env[67015]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1201.679639] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1201.679639] env[67015]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1201.679639] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1201.679639] env[67015]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1201.679639] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1201.679639] env[67015]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1201.679639] env[67015]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1201.679639] env[67015]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-a77a4705-7a5c-4f08-a370-1f4d5187f990'] [ 1201.679639] env[67015]: ERROR oslo.service.loopingcall [ 1201.679639] env[67015]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1201.679639] env[67015]: ERROR oslo.service.loopingcall [ 1201.679639] env[67015]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1201.679639] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1201.679639] env[67015]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1201.680388] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1201.680388] env[67015]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1201.680388] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1201.680388] env[67015]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1201.680388] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1201.680388] env[67015]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1201.680388] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1201.680388] env[67015]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1201.680388] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1201.680388] env[67015]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1201.680388] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1201.680388] env[67015]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1201.680388] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1201.680388] env[67015]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1201.680388] env[67015]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1201.680388] env[67015]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1201.680388] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1201.680388] env[67015]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1201.681403] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1201.681403] env[67015]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1201.681403] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1201.681403] env[67015]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1201.681403] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1201.681403] env[67015]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1201.681403] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1201.681403] env[67015]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1201.681403] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1201.681403] env[67015]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1201.681403] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1201.681403] env[67015]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1201.681403] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1201.681403] env[67015]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1201.681403] env[67015]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1201.681403] env[67015]: ERROR oslo.service.loopingcall [ 1201.682309] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34223ff4-69b3-4369-a2b8-dc1f5fb1f946 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.685913] env[67015]: ERROR nova.compute.manager [None req-b7a26bfe-85a6-48b4-bdac-bdc1485a7944 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
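The traceback above bottoms out in the wrapper in nova/network/neutron.py (line 212 in this run): a 401 on a request made with the service's own admin token is translated into NeutronAdminCredentialConfigurationInvalid rather than retried, since no retry can repair bad service credentials in nova.conf. A minimal sketch of that translation pattern, with simplified stand-in exception classes rather than the real neutronclient/nova ones:

    # Sketch only: stand-ins for neutronclient's Unauthorized and Nova's
    # NeutronAdminCredentialConfigurationInvalid exceptions.
    class Unauthorized(Exception):
        pass

    class NeutronAdminCredentialConfigurationInvalid(Exception):
        pass

    def wrap_neutron_call(func, is_admin_client):
        """Translate a 401 on an admin-token call into a config error.

        Retrying is pointless: if Keystone rejects the service credentials
        from nova.conf, every retry fails the same way.
        """
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Unauthorized:
                if is_admin_client:
                    raise NeutronAdminCredentialConfigurationInvalid()
                raise  # a user-token 401 is the caller's problem
        return wrapper

    # Usage: wrapping a call that always gets 401, as in the log above.
    def list_ports(**search_opts):
        raise Unauthorized("401: The request you have made requires authentication.")

    guarded = wrap_neutron_call(list_ports, is_admin_client=True)
    # guarded(device_id=...) now raises NeutronAdminCredentialConfigurationInvalid.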
[ 1201.697712] env[67015]: DEBUG nova.compute.provider_tree [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1201.707126] env[67015]: DEBUG nova.scheduler.client.report [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1201.724772] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.459s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.725100] env[67015]: DEBUG nova.compute.manager [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1201.728159] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.345s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.729134] env[67015]: INFO nova.compute.claims [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1201.734274] env[67015]: ERROR nova.compute.manager [None req-b7a26bfe-85a6-48b4-bdac-bdc1485a7944 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1201.734274] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Traceback (most recent call last): [ 1201.734274] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1201.734274] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] ret = obj(*args, **kwargs) [ 1201.734274] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1201.734274] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] exception_handler_v20(status_code, error_body) [ 1201.734274] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1201.734274] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] raise client_exc(message=error_message, [ 1201.734274] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1201.734274] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Neutron server returns request_ids: ['req-a77a4705-7a5c-4f08-a370-1f4d5187f990'] [ 1201.734274] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] [ 1201.734640] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] During handling of the above exception, another exception occurred: [ 1201.734640] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] [ 1201.734640] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Traceback (most recent call last): [ 1201.734640] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/compute/manager.py", line 3341, in do_terminate_instance [ 1201.734640] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] self._delete_instance(context, instance, bdms) [ 1201.734640] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1201.734640] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] self._shutdown_instance(context, instance, bdms) [ 1201.734640] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1201.734640] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] self._try_deallocate_network(context, instance, requested_networks) [ 1201.734640] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1201.734640] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] with excutils.save_and_reraise_exception(): [ 1201.734640] env[67015]: ERROR 
nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1201.734640] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] self.force_reraise() [ 1201.735030] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1201.735030] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] raise self.value [ 1201.735030] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1201.735030] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] _deallocate_network_with_retries() [ 1201.735030] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1201.735030] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] return evt.wait() [ 1201.735030] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1201.735030] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] result = hub.switch() [ 1201.735030] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1201.735030] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] return self.greenlet.switch() [ 1201.735030] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1201.735030] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] result = func(*self.args, **self.kw) [ 1201.735416] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1201.735416] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] result = f(*args, **kwargs) [ 1201.735416] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1201.735416] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] self._deallocate_network( [ 1201.735416] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1201.735416] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] self.network_api.deallocate_for_instance( [ 1201.735416] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1201.735416] env[67015]: ERROR nova.compute.manager [instance: 
3cbfca3b-863a-40d1-81ab-63794b8de97e] data = neutron.list_ports(**search_opts) [ 1201.735416] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1201.735416] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] ret = obj(*args, **kwargs) [ 1201.735416] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1201.735416] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] return self.list('ports', self.ports_path, retrieve_all, [ 1201.735416] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1201.735833] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] ret = obj(*args, **kwargs) [ 1201.735833] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1201.735833] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] for r in self._pagination(collection, path, **params): [ 1201.735833] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1201.735833] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] res = self.get(path, params=params) [ 1201.735833] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1201.735833] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] ret = obj(*args, **kwargs) [ 1201.735833] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1201.735833] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] return self.retry_request("GET", action, body=body, [ 1201.735833] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1201.735833] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] ret = obj(*args, **kwargs) [ 1201.735833] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1201.735833] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] return self.do_request(method, action, body=body, [ 1201.736189] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1201.736189] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] ret = obj(*args, **kwargs) [ 1201.736189] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1201.736189] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] self._handle_fault_response(status_code, replybody, resp) [ 1201.736189] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1201.736189] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1201.736189] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1201.736189] env[67015]: ERROR nova.compute.manager [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] [ 1201.803202] env[67015]: DEBUG nova.compute.utils [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1201.804543] env[67015]: DEBUG nova.compute.manager [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1201.805143] env[67015]: DEBUG nova.network.neutron [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1201.808196] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b7a26bfe-85a6-48b4-bdac-bdc1485a7944 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Lock "3cbfca3b-863a-40d1-81ab-63794b8de97e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.490s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.812828] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "3cbfca3b-863a-40d1-81ab-63794b8de97e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 30.999s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.813331] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1201.813331] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "3cbfca3b-863a-40d1-81ab-63794b8de97e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.820045] env[67015]: DEBUG nova.compute.manager [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1201.869574] env[67015]: INFO nova.compute.manager [None req-b7a26bfe-85a6-48b4-bdac-bdc1485a7944 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] [instance: 3cbfca3b-863a-40d1-81ab-63794b8de97e] Successfully reverted task state from None on failure for instance. [ 1201.875071] env[67015]: ERROR oslo_messaging.rpc.server [None req-b7a26bfe-85a6-48b4-bdac-bdc1485a7944 tempest-ServerShowV247Test-2049672408 tempest-ServerShowV247Test-2049672408-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1201.875071] env[67015]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1201.875071] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1201.875071] env[67015]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1201.875071] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1201.875071] env[67015]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1201.875071] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1201.875071] env[67015]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1201.875071] env[67015]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1201.875071] env[67015]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-a77a4705-7a5c-4f08-a370-1f4d5187f990'] [ 1201.875071] env[67015]: ERROR oslo_messaging.rpc.server [ 1201.875071] env[67015]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1201.875071] env[67015]: ERROR oslo_messaging.rpc.server [ 1201.875071] env[67015]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1201.875071] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1201.875071] env[67015]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1201.875584] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1201.875584] env[67015]: ERROR oslo_messaging.rpc.server return 
self._do_dispatch(endpoint, method, ctxt, args) [ 1201.875584] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1201.875584] env[67015]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1201.875584] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1201.875584] env[67015]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1201.875584] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1201.875584] env[67015]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1201.875584] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1201.875584] env[67015]: ERROR oslo_messaging.rpc.server raise self.value [ 1201.875584] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1201.875584] env[67015]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1201.875584] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1201.875584] env[67015]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1201.875584] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1201.875584] env[67015]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1201.875584] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1201.875584] env[67015]: ERROR oslo_messaging.rpc.server raise self.value [ 1201.876075] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1201.876075] env[67015]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1201.876075] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1201.876075] env[67015]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1201.876075] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1201.876075] env[67015]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1201.876075] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1201.876075] env[67015]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1201.876075] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1201.876075] env[67015]: ERROR oslo_messaging.rpc.server raise self.value [ 1201.876075] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1201.876075] env[67015]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1201.876075] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", 
line 3353, in terminate_instance [ 1201.876075] env[67015]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1201.876075] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1201.876075] env[67015]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1201.876075] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3348, in do_terminate_instance [ 1201.876075] env[67015]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1201.876547] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1201.876547] env[67015]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1201.876547] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1201.876547] env[67015]: ERROR oslo_messaging.rpc.server raise self.value [ 1201.876547] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3341, in do_terminate_instance [ 1201.876547] env[67015]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1201.876547] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1201.876547] env[67015]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1201.876547] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1201.876547] env[67015]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1201.876547] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1201.876547] env[67015]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1201.876547] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1201.876547] env[67015]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1201.876547] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1201.876547] env[67015]: ERROR oslo_messaging.rpc.server raise self.value [ 1201.876547] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1201.876547] env[67015]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1201.877151] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1201.877151] env[67015]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1201.877151] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1201.877151] env[67015]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1201.877151] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1201.877151] env[67015]: ERROR oslo_messaging.rpc.server return 
self.greenlet.switch() [ 1201.877151] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1201.877151] env[67015]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1201.877151] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1201.877151] env[67015]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1201.877151] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1201.877151] env[67015]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1201.877151] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1201.877151] env[67015]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1201.877151] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1201.877151] env[67015]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1201.877151] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1201.877151] env[67015]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1201.877637] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1201.877637] env[67015]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1201.877637] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1201.877637] env[67015]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1201.877637] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1201.877637] env[67015]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1201.877637] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1201.877637] env[67015]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1201.877637] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1201.877637] env[67015]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1201.877637] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1201.877637] env[67015]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1201.877637] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1201.877637] env[67015]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1201.877637] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1201.877637] env[67015]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1201.877637] env[67015]: ERROR 
oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1201.877637] env[67015]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1201.878112] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1201.878112] env[67015]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1201.878112] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1201.878112] env[67015]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1201.878112] env[67015]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1201.878112] env[67015]: ERROR oslo_messaging.rpc.server [ 1201.887038] env[67015]: DEBUG nova.compute.manager [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Start spawning the instance on the hypervisor. {{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1201.915719] env[67015]: DEBUG nova.virt.hardware [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1201.917428] env[67015]: DEBUG nova.virt.hardware [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1201.917428] env[67015]: DEBUG nova.virt.hardware [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1201.917428] env[67015]: DEBUG nova.virt.hardware [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1201.917428] env[67015]: DEBUG nova.virt.hardware [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Image 
pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1201.917428] env[67015]: DEBUG nova.virt.hardware [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1201.917668] env[67015]: DEBUG nova.virt.hardware [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1201.917668] env[67015]: DEBUG nova.virt.hardware [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1201.917668] env[67015]: DEBUG nova.virt.hardware [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1201.917668] env[67015]: DEBUG nova.virt.hardware [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1201.917850] env[67015]: DEBUG nova.virt.hardware [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1201.918753] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30358c63-d147-4965-9395-bce8f00f6ce3 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.928554] env[67015]: DEBUG oslo_concurrency.lockutils [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "6d104e2f-9924-4094-823d-a78c21acfc7b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.928821] env[67015]: DEBUG oslo_concurrency.lockutils [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "6d104e2f-9924-4094-823d-a78c21acfc7b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.933404] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with
opID=oslo.vmware-445ab3c4-ccc3-4a9b-9ad7-ac1b4e9f54be {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.960504] env[67015]: DEBUG nova.policy [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '74eb5b7bde3544f1acbcf4d675027421', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '23b843f1db954cf6be117fabe2268c32', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 1202.104438] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10057a86-ae39-4764-8b86-9d961de4e4b9 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.111385] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e425ef14-f4d3-4dce-ada1-3ef8de0272c4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.140213] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beba84ee-34f5-42dd-9520-daba06120c0e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.147174] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce76fd5-c352-4188-96df-127c80938c19 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.160109] env[67015]: DEBUG nova.compute.provider_tree [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1202.169906] env[67015]: DEBUG nova.scheduler.client.report [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1202.183977] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.456s {{(pid=67015) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.184639] env[67015]: DEBUG nova.compute.manager [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1202.217179] env[67015]: DEBUG nova.compute.utils [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1202.218874] env[67015]: DEBUG nova.compute.manager [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1202.219100] env[67015]: DEBUG nova.network.neutron [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1202.233065] env[67015]: DEBUG nova.compute.manager [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1202.274886] env[67015]: DEBUG nova.policy [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13fbc223d45a4e61952ea5358f8c5b46', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0fadd44ab0304a74951242bff6ce54fa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 1202.298579] env[67015]: DEBUG nova.compute.manager [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Start spawning the instance on the hypervisor. 
{{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1202.328524] env[67015]: DEBUG nova.virt.hardware [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1202.328769] env[67015]: DEBUG nova.virt.hardware [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1202.328929] env[67015]: DEBUG nova.virt.hardware [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1202.329134] env[67015]: DEBUG nova.virt.hardware [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1202.329303] env[67015]: DEBUG nova.virt.hardware [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1202.329478] env[67015]: DEBUG nova.virt.hardware [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1202.329695] env[67015]: DEBUG nova.virt.hardware [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1202.329854] env[67015]: DEBUG nova.virt.hardware [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1202.330157] 
env[67015]: DEBUG nova.virt.hardware [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1202.330393] env[67015]: DEBUG nova.virt.hardware [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1202.330591] env[67015]: DEBUG nova.virt.hardware [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1202.332140] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ac2da8-b94d-4b51-b37e-5a14635d9ac3 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.342707] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8c986b7-e9af-4f29-9e31-35716ad17271 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.605604] env[67015]: DEBUG nova.network.neutron [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Successfully created port: a2694771-e780-462e-9f0e-0f8d0808e78c {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1202.618099] env[67015]: DEBUG nova.network.neutron [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Successfully created port: 17cf0f6b-a3bd-4d1d-95ec-28c941296684 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1203.240565] env[67015]: DEBUG nova.network.neutron [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Successfully updated port: 17cf0f6b-a3bd-4d1d-95ec-28c941296684 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1203.249325] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Acquiring lock "refresh_cache-98f18180-bd1c-492d-9fbe-4bf306aca4b2" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1203.249325] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Acquired lock "refresh_cache-98f18180-bd1c-492d-9fbe-4bf306aca4b2" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.251560] env[67015]: 
DEBUG nova.network.neutron [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1203.301798] env[67015]: DEBUG nova.network.neutron [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1203.467964] env[67015]: DEBUG nova.network.neutron [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Updating instance_info_cache with network_info: [{"id": "17cf0f6b-a3bd-4d1d-95ec-28c941296684", "address": "fa:16:3e:3b:27:85", "network": {"id": "0c302ac0-c37a-4b65-9b73-f50b93c12025", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1281668385-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0fadd44ab0304a74951242bff6ce54fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da0234c8-1a2d-47ff-9a72-2e7d35b49214", "external-id": "nsx-vlan-transportzone-788", "segmentation_id": 788, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17cf0f6b-a3", "ovs_interfaceid": "17cf0f6b-a3bd-4d1d-95ec-28c941296684", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.480735] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Releasing lock "refresh_cache-98f18180-bd1c-492d-9fbe-4bf306aca4b2" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1203.481331] env[67015]: DEBUG nova.compute.manager [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Instance network_info: |[{"id": "17cf0f6b-a3bd-4d1d-95ec-28c941296684", "address": "fa:16:3e:3b:27:85", "network": {"id": "0c302ac0-c37a-4b65-9b73-f50b93c12025", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1281668385-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": 
false, "tenant_id": "0fadd44ab0304a74951242bff6ce54fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da0234c8-1a2d-47ff-9a72-2e7d35b49214", "external-id": "nsx-vlan-transportzone-788", "segmentation_id": 788, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17cf0f6b-a3", "ovs_interfaceid": "17cf0f6b-a3bd-4d1d-95ec-28c941296684", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1203.481985] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:27:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'da0234c8-1a2d-47ff-9a72-2e7d35b49214', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '17cf0f6b-a3bd-4d1d-95ec-28c941296684', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1203.489508] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Creating folder: Project (0fadd44ab0304a74951242bff6ce54fa). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1203.489975] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-01c7242d-6f2e-42d5-882b-09f66a3e2c93 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.500675] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Created folder: Project (0fadd44ab0304a74951242bff6ce54fa) in parent group-v623108. [ 1203.500862] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Creating folder: Instances. Parent ref: group-v623181. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1203.501087] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9362c416-5fc8-4066-81a7-35b0dd88edca {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.516573] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Created folder: Instances in parent group-v623181. [ 1203.516854] env[67015]: DEBUG oslo.service.loopingcall [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1203.517099] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1203.517313] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-879d58a2-23c9-45c2-9fc9-f502897d115c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.550446] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1203.550446] env[67015]: value = "task-3114483" [ 1203.550446] env[67015]: _type = "Task" [ 1203.550446] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.558019] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114483, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.613586] env[67015]: DEBUG nova.compute.manager [req-b7a5edbe-eec5-47b5-afee-1d43e6363712 req-41213277-bcd6-43a2-815e-1ccba67ac3de service nova] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Received event network-vif-plugged-a2694771-e780-462e-9f0e-0f8d0808e78c {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1203.613866] env[67015]: DEBUG oslo_concurrency.lockutils [req-b7a5edbe-eec5-47b5-afee-1d43e6363712 req-41213277-bcd6-43a2-815e-1ccba67ac3de service nova] Acquiring lock "7b744243-c7e5-4253-9273-9d7f84772d96-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.614139] env[67015]: DEBUG oslo_concurrency.lockutils [req-b7a5edbe-eec5-47b5-afee-1d43e6363712 req-41213277-bcd6-43a2-815e-1ccba67ac3de service nova] Lock "7b744243-c7e5-4253-9273-9d7f84772d96-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.614370] env[67015]: DEBUG oslo_concurrency.lockutils [req-b7a5edbe-eec5-47b5-afee-1d43e6363712 req-41213277-bcd6-43a2-815e-1ccba67ac3de service nova] Lock "7b744243-c7e5-4253-9273-9d7f84772d96-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.614616] env[67015]: DEBUG nova.compute.manager [req-b7a5edbe-eec5-47b5-afee-1d43e6363712 req-41213277-bcd6-43a2-815e-1ccba67ac3de service nova] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] No waiting events found dispatching network-vif-plugged-a2694771-e780-462e-9f0e-0f8d0808e78c {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1203.614805] env[67015]: WARNING nova.compute.manager [req-b7a5edbe-eec5-47b5-afee-1d43e6363712 req-41213277-bcd6-43a2-815e-1ccba67ac3de service nova] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Received unexpected event network-vif-plugged-a2694771-e780-462e-9f0e-0f8d0808e78c for instance with vm_state building and task_state spawning.
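[Editor's note] The WARNING that closes the record above comes from Nova's external-event handling: Neutron delivered network-vif-plugged before the compute manager had registered a waiter for that event, so pop_instance_event found nothing to dispatch and the event was dropped with a warning (harmless while the instance is still spawning). A minimal, self-contained sketch of that waiter/pop pattern, with simplified names and threading.Event standing in for Nova's actual eventlet-based objects:

```python
# Hedged sketch of the register-waiter / pop-event pattern visible in the log.
# Not Nova's real implementation; names and types are illustrative.
import threading

class InstanceEvents:
    def __init__(self):
        # (instance_uuid, event_name) -> threading.Event registered by a waiter
        self._waiters = {}
        self._lock = threading.Lock()

    def prepare_for_event(self, instance_uuid, event_name):
        """Register interest before triggering the external action."""
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop_instance_event(self, instance_uuid, event_name):
        """Remove and return the waiter, or None if nobody is waiting."""
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)

def external_instance_event(events, instance_uuid, event_name):
    """Entry point for an event arriving from the network service."""
    waiter = events.pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # Matches the WARNING in the log: the event raced ahead of the waiter.
        print(f"WARNING: unexpected event {event_name} for {instance_uuid}")
    else:
        waiter.set()

if __name__ == "__main__":
    events = InstanceEvents()
    # Event arrives before prepare_for_event() was called -> warning, as above.
    external_instance_event(events, "7b744243", "network-vif-plugged")
```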
[ 1203.631397] env[67015]: DEBUG nova.compute.manager [req-43155fb1-40e9-45f4-a482-5eb19703e53f req-b46ba247-e39e-4144-b110-841dfd43ff4e service nova] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Received event network-vif-plugged-17cf0f6b-a3bd-4d1d-95ec-28c941296684 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1203.631597] env[67015]: DEBUG oslo_concurrency.lockutils [req-43155fb1-40e9-45f4-a482-5eb19703e53f req-b46ba247-e39e-4144-b110-841dfd43ff4e service nova] Acquiring lock "98f18180-bd1c-492d-9fbe-4bf306aca4b2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.631911] env[67015]: DEBUG oslo_concurrency.lockutils [req-43155fb1-40e9-45f4-a482-5eb19703e53f req-b46ba247-e39e-4144-b110-841dfd43ff4e service nova] Lock "98f18180-bd1c-492d-9fbe-4bf306aca4b2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.632063] env[67015]: DEBUG oslo_concurrency.lockutils [req-43155fb1-40e9-45f4-a482-5eb19703e53f req-b46ba247-e39e-4144-b110-841dfd43ff4e service nova] Lock "98f18180-bd1c-492d-9fbe-4bf306aca4b2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.632561] env[67015]: DEBUG nova.compute.manager [req-43155fb1-40e9-45f4-a482-5eb19703e53f req-b46ba247-e39e-4144-b110-841dfd43ff4e service nova] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] No waiting events found dispatching network-vif-plugged-17cf0f6b-a3bd-4d1d-95ec-28c941296684 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1203.632561] env[67015]: WARNING nova.compute.manager [req-43155fb1-40e9-45f4-a482-5eb19703e53f req-b46ba247-e39e-4144-b110-841dfd43ff4e service nova] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Received unexpected event network-vif-plugged-17cf0f6b-a3bd-4d1d-95ec-28c941296684 for instance with vm_state building and task_state spawning.
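[Editor's note] The surrounding "Waiting for the task: ... progress is 0%. ... completed successfully" records show oslo.vmware's wait_for_task loop polling a vCenter task (here CreateVM_Task) until it reports a terminal state. A rough stand-alone sketch of that polling shape; poll_fn, the task-dict layout, and the interval are illustrative assumptions, not the real oslo.vmware API:

```python
# Hedged sketch of a poll-until-done loop like the one the log records.
import time

def wait_for_task(poll_fn, interval=0.5, timeout=60.0):
    """Poll poll_fn() until the task dict reports 'success' or 'error'."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        task = poll_fn()
        # Mirrors the "Task: {...} progress is N%." DEBUG lines in the log.
        print(f"Task {task['id']} progress is {task.get('progress', 0)}%.")
        if task["state"] == "success":
            return task.get("result")
        if task["state"] == "error":
            raise RuntimeError(task.get("error", "task failed"))
        time.sleep(interval)
    raise TimeoutError("task did not complete within the timeout")

if __name__ == "__main__":
    # Simulate a CreateVM_Task that succeeds on the third poll.
    states = iter([("running", 0), ("running", 45), ("success", 100)])

    def fake_poll():
        state, progress = next(states)
        return {"id": "task-3114483", "state": state,
                "progress": progress, "result": "vm-ref"}

    print(wait_for_task(fake_poll, interval=0.01))
```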
[ 1203.674159] env[67015]: DEBUG nova.network.neutron [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Successfully updated port: a2694771-e780-462e-9f0e-0f8d0808e78c {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1203.685987] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Acquiring lock "refresh_cache-7b744243-c7e5-4253-9273-9d7f84772d96" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1203.686144] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Acquired lock "refresh_cache-7b744243-c7e5-4253-9273-9d7f84772d96" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.686292] env[67015]: DEBUG nova.network.neutron [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1203.729839] env[67015]: DEBUG nova.network.neutron [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1204.062051] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114483, 'name': CreateVM_Task, 'duration_secs': 0.285253} completed successfully. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.062051] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1204.067336] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1204.067515] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.067828] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1204.068109] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2578af51-9d53-42a8-b78e-caa2b2073c6e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.072740] env[67015]: DEBUG oslo_vmware.api [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Waiting for the task: (returnval){ [ 1204.072740] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]521b6e74-da31-0c89-b007-65df2e3b753e" [ 1204.072740] env[67015]: _type = "Task" [ 1204.072740] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.080268] env[67015]: DEBUG oslo_vmware.api [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]521b6e74-da31-0c89-b007-65df2e3b753e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.085695] env[67015]: DEBUG nova.network.neutron [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Updating instance_info_cache with network_info: [{"id": "a2694771-e780-462e-9f0e-0f8d0808e78c", "address": "fa:16:3e:9f:1c:50", "network": {"id": "32ff97ea-2879-466b-afef-c63118b64ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-589104789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23b843f1db954cf6be117fabe2268c32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bab6a6c3-1c5c-4776-b21b-dec21196d702", "external-id": "nsx-vlan-transportzone-634", "segmentation_id": 634, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2694771-e7", "ovs_interfaceid": "a2694771-e780-462e-9f0e-0f8d0808e78c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1204.100933] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Releasing lock "refresh_cache-7b744243-c7e5-4253-9273-9d7f84772d96" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1204.101245] env[67015]: DEBUG nova.compute.manager [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Instance network_info: |[{"id": "a2694771-e780-462e-9f0e-0f8d0808e78c", "address": "fa:16:3e:9f:1c:50", "network": {"id": "32ff97ea-2879-466b-afef-c63118b64ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-589104789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23b843f1db954cf6be117fabe2268c32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bab6a6c3-1c5c-4776-b21b-dec21196d702", "external-id": "nsx-vlan-transportzone-634", "segmentation_id": 634, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2694771-e7", "ovs_interfaceid": "a2694771-e780-462e-9f0e-0f8d0808e78c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1204.101660] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9f:1c:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bab6a6c3-1c5c-4776-b21b-dec21196d702', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a2694771-e780-462e-9f0e-0f8d0808e78c', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1204.109873] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Creating folder: Project (23b843f1db954cf6be117fabe2268c32). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1204.110422] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8652f507-a758-4b68-83b4-a60836ac6187 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.121123] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Created folder: Project (23b843f1db954cf6be117fabe2268c32) in parent group-v623108. [ 1204.121312] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Creating folder: Instances. Parent ref: group-v623184. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1204.121536] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e31e35d8-d182-4137-9938-2ed1dc4f34a2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.130061] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Created folder: Instances in parent group-v623184. [ 1204.130293] env[67015]: DEBUG oslo.service.loopingcall [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1204.130471] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1204.130663] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-52725ae9-9385-48d3-b2db-bb071546c783 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.149242] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1204.149242] env[67015]: value = "task-3114486" [ 1204.149242] env[67015]: _type = "Task" [ 1204.149242] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.156687] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114486, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.584573] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1204.584853] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1204.585077] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1204.661669] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114486, 'name': CreateVM_Task, 'duration_secs': 0.308849} completed successfully. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.662370] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1204.663292] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1204.663530] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.663942] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1204.664298] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41bebbdc-e165-49c2-a63e-9035fa471387 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.669899] env[67015]: DEBUG oslo_vmware.api [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Waiting for the task: (returnval){ [ 1204.669899] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52d5d3fc-c3ba-5f2b-3015-f0ac999465ab" [ 1204.669899] env[67015]: _type = "Task" [ 1204.669899] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.678271] env[67015]: DEBUG oslo_vmware.api [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52d5d3fc-c3ba-5f2b-3015-f0ac999465ab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.183349] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1205.183925] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1205.184214] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1205.815187] env[67015]: DEBUG nova.compute.manager [req-8e42a5e0-a5ef-4808-ad5f-a2efd9cde8f1 req-901d78fd-3c31-45dd-b3d6-fb5a2b5c1ce4 service nova] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Received event network-changed-17cf0f6b-a3bd-4d1d-95ec-28c941296684 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1205.815436] env[67015]: DEBUG nova.compute.manager [req-8e42a5e0-a5ef-4808-ad5f-a2efd9cde8f1 req-901d78fd-3c31-45dd-b3d6-fb5a2b5c1ce4 service nova] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Refreshing instance network info cache due to event network-changed-17cf0f6b-a3bd-4d1d-95ec-28c941296684. 
{{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1205.815594] env[67015]: DEBUG oslo_concurrency.lockutils [req-8e42a5e0-a5ef-4808-ad5f-a2efd9cde8f1 req-901d78fd-3c31-45dd-b3d6-fb5a2b5c1ce4 service nova] Acquiring lock "refresh_cache-98f18180-bd1c-492d-9fbe-4bf306aca4b2" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1205.815736] env[67015]: DEBUG oslo_concurrency.lockutils [req-8e42a5e0-a5ef-4808-ad5f-a2efd9cde8f1 req-901d78fd-3c31-45dd-b3d6-fb5a2b5c1ce4 service nova] Acquired lock "refresh_cache-98f18180-bd1c-492d-9fbe-4bf306aca4b2" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.815895] env[67015]: DEBUG nova.network.neutron [req-8e42a5e0-a5ef-4808-ad5f-a2efd9cde8f1 req-901d78fd-3c31-45dd-b3d6-fb5a2b5c1ce4 service nova] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Refreshing network info cache for port 17cf0f6b-a3bd-4d1d-95ec-28c941296684 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1205.823322] env[67015]: DEBUG nova.compute.manager [req-bce6908c-900c-46f0-a2c3-a0fe4b6fed67 req-6b109243-2c4b-4b8f-a368-b03950e656dc service nova] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Received event network-changed-a2694771-e780-462e-9f0e-0f8d0808e78c {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1205.823607] env[67015]: DEBUG nova.compute.manager [req-bce6908c-900c-46f0-a2c3-a0fe4b6fed67 req-6b109243-2c4b-4b8f-a368-b03950e656dc service nova] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Refreshing instance network info cache due to event network-changed-a2694771-e780-462e-9f0e-0f8d0808e78c. {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1205.823848] env[67015]: DEBUG oslo_concurrency.lockutils [req-bce6908c-900c-46f0-a2c3-a0fe4b6fed67 req-6b109243-2c4b-4b8f-a368-b03950e656dc service nova] Acquiring lock "refresh_cache-7b744243-c7e5-4253-9273-9d7f84772d96" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1205.824014] env[67015]: DEBUG oslo_concurrency.lockutils [req-bce6908c-900c-46f0-a2c3-a0fe4b6fed67 req-6b109243-2c4b-4b8f-a368-b03950e656dc service nova] Acquired lock "refresh_cache-7b744243-c7e5-4253-9273-9d7f84772d96" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.824201] env[67015]: DEBUG nova.network.neutron [req-bce6908c-900c-46f0-a2c3-a0fe4b6fed67 req-6b109243-2c4b-4b8f-a368-b03950e656dc service nova] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Refreshing network info cache for port a2694771-e780-462e-9f0e-0f8d0808e78c {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1206.253735] env[67015]: DEBUG nova.network.neutron [req-bce6908c-900c-46f0-a2c3-a0fe4b6fed67 req-6b109243-2c4b-4b8f-a368-b03950e656dc service nova] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Updated VIF entry in instance network info cache for port a2694771-e780-462e-9f0e-0f8d0808e78c. 
{{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1206.254102] env[67015]: DEBUG nova.network.neutron [req-bce6908c-900c-46f0-a2c3-a0fe4b6fed67 req-6b109243-2c4b-4b8f-a368-b03950e656dc service nova] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Updating instance_info_cache with network_info: [{"id": "a2694771-e780-462e-9f0e-0f8d0808e78c", "address": "fa:16:3e:9f:1c:50", "network": {"id": "32ff97ea-2879-466b-afef-c63118b64ac9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-589104789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23b843f1db954cf6be117fabe2268c32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bab6a6c3-1c5c-4776-b21b-dec21196d702", "external-id": "nsx-vlan-transportzone-634", "segmentation_id": 634, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2694771-e7", "ovs_interfaceid": "a2694771-e780-462e-9f0e-0f8d0808e78c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.266507] env[67015]: DEBUG oslo_concurrency.lockutils [req-bce6908c-900c-46f0-a2c3-a0fe4b6fed67 req-6b109243-2c4b-4b8f-a368-b03950e656dc service nova] Releasing lock "refresh_cache-7b744243-c7e5-4253-9273-9d7f84772d96" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1206.626307] env[67015]: DEBUG nova.network.neutron [req-8e42a5e0-a5ef-4808-ad5f-a2efd9cde8f1 req-901d78fd-3c31-45dd-b3d6-fb5a2b5c1ce4 service nova] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Updated VIF entry in instance network info cache for port 17cf0f6b-a3bd-4d1d-95ec-28c941296684. 
{{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1206.626654] env[67015]: DEBUG nova.network.neutron [req-8e42a5e0-a5ef-4808-ad5f-a2efd9cde8f1 req-901d78fd-3c31-45dd-b3d6-fb5a2b5c1ce4 service nova] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Updating instance_info_cache with network_info: [{"id": "17cf0f6b-a3bd-4d1d-95ec-28c941296684", "address": "fa:16:3e:3b:27:85", "network": {"id": "0c302ac0-c37a-4b65-9b73-f50b93c12025", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1281668385-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0fadd44ab0304a74951242bff6ce54fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da0234c8-1a2d-47ff-9a72-2e7d35b49214", "external-id": "nsx-vlan-transportzone-788", "segmentation_id": 788, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17cf0f6b-a3", "ovs_interfaceid": "17cf0f6b-a3bd-4d1d-95ec-28c941296684", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.636815] env[67015]: DEBUG oslo_concurrency.lockutils [req-8e42a5e0-a5ef-4808-ad5f-a2efd9cde8f1 req-901d78fd-3c31-45dd-b3d6-fb5a2b5c1ce4 service nova] Releasing lock "refresh_cache-98f18180-bd1c-492d-9fbe-4bf306aca4b2" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1209.432077] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9f8574ad-8d0f-4beb-89c3-2c1a65e66fcb tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Acquiring lock "7b744243-c7e5-4253-9273-9d7f84772d96" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1211.728253] env[67015]: DEBUG oslo_concurrency.lockutils [None req-25e0b05f-f17f-4e08-bd07-2d36b4e7446d tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Acquiring lock "98f18180-bd1c-492d-9fbe-4bf306aca4b2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1225.827252] env[67015]: DEBUG oslo_concurrency.lockutils [None req-cfbb2e8d-aa60-44b6-b153-a8a1a642f04b tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Acquiring lock "55346ba0-b93e-489f-8b89-640b7e33e384" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1225.827556] env[67015]: DEBUG oslo_concurrency.lockutils [None req-cfbb2e8d-aa60-44b6-b153-a8a1a642f04b tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Lock 
"55346ba0-b93e-489f-8b89-640b7e33e384" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1245.514643] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1245.514956] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1245.515110] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1246.025014] env[67015]: WARNING oslo_vmware.rw_handles [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1246.025014] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1246.025014] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1246.025014] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1246.025014] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1246.025014] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 1246.025014] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1246.025014] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1246.025014] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1246.025014] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1246.025014] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1246.025014] env[67015]: ERROR oslo_vmware.rw_handles [ 1246.025594] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/ca05b5ae-4f3c-44e1-8b1e-0760281f5db9/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1246.027563] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Caching image {{(pid=67015) 
_fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1246.027838] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Copying Virtual Disk [datastore2] vmware_temp/ca05b5ae-4f3c-44e1-8b1e-0760281f5db9/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/ca05b5ae-4f3c-44e1-8b1e-0760281f5db9/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1246.028166] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e936349b-6846-4e26-9bc9-f91ddb6a7133 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.035799] env[67015]: DEBUG oslo_vmware.api [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Waiting for the task: (returnval){ [ 1246.035799] env[67015]: value = "task-3114487" [ 1246.035799] env[67015]: _type = "Task" [ 1246.035799] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.043854] env[67015]: DEBUG oslo_vmware.api [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Task: {'id': task-3114487, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.547944] env[67015]: DEBUG oslo_vmware.exceptions [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Fault InvalidArgument not matched. 
{{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1246.548384] env[67015]: DEBUG oslo_concurrency.lockutils [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1246.548917] env[67015]: ERROR nova.compute.manager [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1246.548917] env[67015]: Faults: ['InvalidArgument'] [ 1246.548917] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Traceback (most recent call last): [ 1246.548917] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1246.548917] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] yield resources [ 1246.548917] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1246.548917] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] self.driver.spawn(context, instance, image_meta, [ 1246.548917] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1246.548917] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1246.548917] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1246.548917] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] self._fetch_image_if_missing(context, vi) [ 1246.548917] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1246.549348] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] image_cache(vi, tmp_image_ds_loc) [ 1246.549348] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1246.549348] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] vm_util.copy_virtual_disk( [ 1246.549348] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1246.549348] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] session._wait_for_task(vmdk_copy_task) [ 1246.549348] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1246.549348] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] return self.wait_for_task(task_ref) [ 1246.549348] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1246.549348] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] return evt.wait() [ 1246.549348] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1246.549348] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] result = hub.switch() [ 1246.549348] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1246.549348] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] return self.greenlet.switch() [ 1246.549776] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1246.549776] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] self.f(*self.args, **self.kw) [ 1246.549776] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1246.549776] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] raise exceptions.translate_fault(task_info.error) [ 1246.549776] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1246.549776] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Faults: ['InvalidArgument'] [ 1246.549776] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] [ 1246.549776] env[67015]: INFO nova.compute.manager [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Terminating instance [ 1246.550836] env[67015]: DEBUG oslo_concurrency.lockutils [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1246.551051] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1246.551676] env[67015]: DEBUG nova.compute.manager [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 
tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1246.551868] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1246.552110] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b3083a8-473f-4378-8120-0d25b8726b71 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.554404] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6279267-1ec8-4922-a947-7d48d5078e1b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.561602] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1246.562584] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-15d27867-28c9-4fbb-ab3e-4dee06c291dc {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.563992] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1246.564185] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1246.564844] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3f72c3b-8b96-431b-8db0-78de95e66c67 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.569579] env[67015]: DEBUG oslo_vmware.api [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Waiting for the task: (returnval){ [ 1246.569579] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]525c5294-4bd8-e66d-e253-a99f8bc42d9c" [ 1246.569579] env[67015]: _type = "Task" [ 1246.569579] env[67015]: } to complete. 
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.576688] env[67015]: DEBUG oslo_vmware.api [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]525c5294-4bd8-e66d-e253-a99f8bc42d9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.633018] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1246.633018] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1246.633018] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Deleting the datastore file [datastore2] 5c77964f-e902-489a-86c3-9c9d4dd304d3 {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1246.633018] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31c5176b-672f-4a91-b6dc-d8c4b21e50ab {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.638627] env[67015]: DEBUG oslo_vmware.api [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Waiting for the task: (returnval){ [ 1246.638627] env[67015]: value = "task-3114489" [ 1246.638627] env[67015]: _type = "Task" [ 1246.638627] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.646158] env[67015]: DEBUG oslo_vmware.api [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Task: {'id': task-3114489, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.081066] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1247.081066] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Creating directory with path [datastore2] vmware_temp/dd6f3d90-027c-435d-b478-404e423f6830/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1247.081066] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-def63e11-f187-419b-9cb7-c77f3f5e263c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.091311] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Created directory with path [datastore2] vmware_temp/dd6f3d90-027c-435d-b478-404e423f6830/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1247.091457] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Fetch image to [datastore2] vmware_temp/dd6f3d90-027c-435d-b478-404e423f6830/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1247.091620] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/dd6f3d90-027c-435d-b478-404e423f6830/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1247.092342] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e0f5f8c-31f1-4a96-abe7-3718d1e33a6b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.098605] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5eaccd-5a09-4868-bcb9-1f1f82993c79 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.108009] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddfaaebe-59e0-4c79-8ed1-4a661453b2e6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.137323] env[67015]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-476104e7-6dd3-47b5-95c9-2a7b9cf86876 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.147354] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-35145bbb-7c08-4b23-8fb0-bc02b8d9add4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.148941] env[67015]: DEBUG oslo_vmware.api [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Task: {'id': task-3114489, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07533} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.149192] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1247.149370] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1247.149543] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1247.149742] env[67015]: INFO nova.compute.manager [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Took 0.60 seconds to destroy the instance on the hypervisor. 
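Annotation: the CopyVirtualDisk_Task / wait_for_task pairing that fails above with "A specified parameter was not correct: fileType" follows oslo.vmware's invoke-then-poll pattern: the *_Task call returns a task reference immediately, and wait_for_task() polls TaskInfo (the "progress is 0%." entries) until it raises a translated fault. A minimal sketch of that pattern, assuming an already-reachable vCenter; the endpoint, credentials, and datastore paths are placeholders, not values from this deployment:

    from oslo_vmware import api as vmware_api
    from oslo_vmware import exceptions as vexc

    # Placeholder endpoint/credentials; nova reads these from the
    # [vmware] section of nova.conf.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    def copy_virtual_disk(session, dc_ref, source, dest):
        # CopyVirtualDisk_Task returns a task reference right away;
        # wait_for_task() polls it and raises a translated fault
        # when the task errors out instead of completing.
        vdm = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', vdm,
            sourceName=source, sourceDatacenter=dc_ref,
            destName=dest)
        try:
            session.wait_for_task(task)
        except vexc.VimFaultException as e:
            # The failure above surfaces here: fault_list contains
            # 'InvalidArgument' and the message names 'fileType'.
            if 'InvalidArgument' in e.fault_list:
                pass  # caller re-schedules the build, as the log shows
            raise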
[ 1247.151836] env[67015]: DEBUG nova.compute.claims [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1247.152019] env[67015]: DEBUG oslo_concurrency.lockutils [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1247.152244] env[67015]: DEBUG oslo_concurrency.lockutils [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1247.168476] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1247.221336] env[67015]: DEBUG oslo_vmware.rw_handles [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dd6f3d90-027c-435d-b478-404e423f6830/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1247.280864] env[67015]: DEBUG oslo_vmware.rw_handles [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1247.281049] env[67015]: DEBUG oslo_vmware.rw_handles [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dd6f3d90-027c-435d-b478-404e423f6830/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1247.447209] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-877cb9fb-34be-4136-9f13-176eafe35533 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.454887] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dfb904b-bef9-49fc-bb2f-1eb907c1663e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.485131] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8a3f9b-298b-4942-937a-2cbb39075c86 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.492073] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d7e22f5-e759-4297-8e81-3c6812e8aad1 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.504862] env[67015]: DEBUG nova.compute.provider_tree [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1247.513510] env[67015]: DEBUG nova.scheduler.client.report [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1247.516741] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1247.530194] env[67015]: DEBUG oslo_concurrency.lockutils [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.378s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1247.530728] env[67015]: ERROR nova.compute.manager [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1247.530728] env[67015]: Faults: ['InvalidArgument'] [ 
1247.530728] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Traceback (most recent call last): [ 1247.530728] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1247.530728] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] self.driver.spawn(context, instance, image_meta, [ 1247.530728] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1247.530728] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1247.530728] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1247.530728] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] self._fetch_image_if_missing(context, vi) [ 1247.530728] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1247.530728] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] image_cache(vi, tmp_image_ds_loc) [ 1247.530728] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1247.531149] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] vm_util.copy_virtual_disk( [ 1247.531149] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1247.531149] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] session._wait_for_task(vmdk_copy_task) [ 1247.531149] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1247.531149] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] return self.wait_for_task(task_ref) [ 1247.531149] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1247.531149] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] return evt.wait() [ 1247.531149] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1247.531149] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] result = hub.switch() [ 1247.531149] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1247.531149] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] return self.greenlet.switch() [ 1247.531149] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1247.531149] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] self.f(*self.args, **self.kw) [ 1247.531553] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1247.531553] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] raise exceptions.translate_fault(task_info.error) [ 1247.531553] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1247.531553] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Faults: ['InvalidArgument'] [ 1247.531553] env[67015]: ERROR nova.compute.manager [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] [ 1247.531553] env[67015]: DEBUG nova.compute.utils [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1247.532917] env[67015]: DEBUG nova.compute.manager [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Build of instance 5c77964f-e902-489a-86c3-9c9d4dd304d3 was re-scheduled: A specified parameter was not correct: fileType [ 1247.532917] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1247.533328] env[67015]: DEBUG nova.compute.manager [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1247.533499] env[67015]: DEBUG nova.compute.manager [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1247.533670] env[67015]: DEBUG nova.compute.manager [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1247.533833] env[67015]: DEBUG nova.network.neutron [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1248.139057] env[67015]: DEBUG nova.network.neutron [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1248.150446] env[67015]: INFO nova.compute.manager [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Took 0.62 seconds to deallocate network for instance. [ 1248.245986] env[67015]: INFO nova.scheduler.client.report [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Deleted allocations for instance 5c77964f-e902-489a-86c3-9c9d4dd304d3 [ 1248.266494] env[67015]: DEBUG oslo_concurrency.lockutils [None req-74647b6e-2fdf-4dc0-9fcb-ef8e4b2362a8 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Lock "5c77964f-e902-489a-86c3-9c9d4dd304d3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 623.407s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1248.267728] env[67015]: DEBUG oslo_concurrency.lockutils [None req-2b572639-b712-416f-8c76-fb10065536b2 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Lock "5c77964f-e902-489a-86c3-9c9d4dd304d3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 427.197s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1248.268140] env[67015]: DEBUG oslo_concurrency.lockutils [None req-2b572639-b712-416f-8c76-fb10065536b2 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Acquiring lock "5c77964f-e902-489a-86c3-9c9d4dd304d3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1248.268272] env[67015]: DEBUG oslo_concurrency.lockutils [None req-2b572639-b712-416f-8c76-fb10065536b2 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Lock 
"5c77964f-e902-489a-86c3-9c9d4dd304d3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1248.268562] env[67015]: DEBUG oslo_concurrency.lockutils [None req-2b572639-b712-416f-8c76-fb10065536b2 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Lock "5c77964f-e902-489a-86c3-9c9d4dd304d3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1248.270625] env[67015]: INFO nova.compute.manager [None req-2b572639-b712-416f-8c76-fb10065536b2 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Terminating instance [ 1248.272638] env[67015]: DEBUG nova.compute.manager [None req-2b572639-b712-416f-8c76-fb10065536b2 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1248.272870] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-2b572639-b712-416f-8c76-fb10065536b2 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1248.273379] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-24486860-fc81-48ce-bb43-bd8865f92125 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.285023] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529713d6-10c1-4aa8-81d2-520798d7d7bf {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.293809] env[67015]: DEBUG nova.compute.manager [None req-c93f7f52-e61a-4337-a511-b62748cef194 tempest-ServerActionsTestOtherA-1420094941 tempest-ServerActionsTestOtherA-1420094941-project-member] [instance: 3c14fe11-5172-4611-acf8-c29746a5658e] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1248.315084] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-2b572639-b712-416f-8c76-fb10065536b2 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5c77964f-e902-489a-86c3-9c9d4dd304d3 could not be found. 
[ 1248.315329] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-2b572639-b712-416f-8c76-fb10065536b2 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1248.315547] env[67015]: INFO nova.compute.manager [None req-2b572639-b712-416f-8c76-fb10065536b2 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1248.315787] env[67015]: DEBUG oslo.service.loopingcall [None req-2b572639-b712-416f-8c76-fb10065536b2 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1248.316018] env[67015]: DEBUG nova.compute.manager [-] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1248.316110] env[67015]: DEBUG nova.network.neutron [-] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1248.318363] env[67015]: DEBUG nova.compute.manager [None req-c93f7f52-e61a-4337-a511-b62748cef194 tempest-ServerActionsTestOtherA-1420094941 tempest-ServerActionsTestOtherA-1420094941-project-member] [instance: 3c14fe11-5172-4611-acf8-c29746a5658e] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1248.337467] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c93f7f52-e61a-4337-a511-b62748cef194 tempest-ServerActionsTestOtherA-1420094941 tempest-ServerActionsTestOtherA-1420094941-project-member] Lock "3c14fe11-5172-4611-acf8-c29746a5658e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 223.113s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1248.345367] env[67015]: DEBUG nova.network.neutron [-] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1248.347304] env[67015]: DEBUG nova.compute.manager [None req-fec408af-71d4-4b95-994d-e15a1b6e2fbb tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6d6a0e44-f9b4-4da4-948b-b05b86c93a3d] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1248.352819] env[67015]: INFO nova.compute.manager [-] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] Took 0.04 seconds to deallocate network for instance. [ 1248.369683] env[67015]: DEBUG nova.compute.manager [None req-fec408af-71d4-4b95-994d-e15a1b6e2fbb tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6d6a0e44-f9b4-4da4-948b-b05b86c93a3d] Instance disappeared before build. 
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1248.390872] env[67015]: DEBUG oslo_concurrency.lockutils [None req-fec408af-71d4-4b95-994d-e15a1b6e2fbb tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Lock "6d6a0e44-f9b4-4da4-948b-b05b86c93a3d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.791s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1248.399451] env[67015]: DEBUG nova.compute.manager [None req-73e9b7b3-7756-402e-b1da-57f4cfa8e817 tempest-ListServerFiltersTestJSON-1317128283 tempest-ListServerFiltersTestJSON-1317128283-project-member] [instance: 13d49c4b-bc0e-4e9e-aecb-59fd2745e9c5] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1248.422445] env[67015]: DEBUG nova.compute.manager [None req-73e9b7b3-7756-402e-b1da-57f4cfa8e817 tempest-ListServerFiltersTestJSON-1317128283 tempest-ListServerFiltersTestJSON-1317128283-project-member] [instance: 13d49c4b-bc0e-4e9e-aecb-59fd2745e9c5] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1248.443640] env[67015]: DEBUG oslo_concurrency.lockutils [None req-2b572639-b712-416f-8c76-fb10065536b2 tempest-VolumesAssistedSnapshotsTest-1084042493 tempest-VolumesAssistedSnapshotsTest-1084042493-project-member] Lock "5c77964f-e902-489a-86c3-9c9d4dd304d3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.176s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1248.444621] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "5c77964f-e902-489a-86c3-9c9d4dd304d3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 77.631s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1248.444997] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 5c77964f-e902-489a-86c3-9c9d4dd304d3] During sync_power_state the instance has a pending task (deleting). Skip. 
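Annotation: the "Running periodic task ComputeManager._..." entries and the "During sync_power_state ... Skip." line are driven by oslo.service's periodic-task machinery. A sketch of how such a task is registered and dispatched; the class name, spacing, and body are assumptions for illustration only:

    from oslo_config import cfg
    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(cfg.CONF)

        # Registered at class-definition time; run_periodic_tasks()
        # logs 'Running periodic task Manager._sync_power_states'
        # before each invocation.
        @periodic_task.periodic_task(spacing=600, run_immediately=True)
        def _sync_power_states(self, context):
            # Compare hypervisor power state with the DB per instance;
            # instances with a pending task (e.g. 'deleting') are
            # skipped, as the Skip entry above shows.
            pass

    mgr = Manager()
    mgr.run_periodic_tasks(context=None)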
[ 1248.444997] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "5c77964f-e902-489a-86c3-9c9d4dd304d3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1248.451366] env[67015]: DEBUG oslo_concurrency.lockutils [None req-73e9b7b3-7756-402e-b1da-57f4cfa8e817 tempest-ListServerFiltersTestJSON-1317128283 tempest-ListServerFiltersTestJSON-1317128283-project-member] Lock "13d49c4b-bc0e-4e9e-aecb-59fd2745e9c5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.712s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1248.459458] env[67015]: DEBUG nova.compute.manager [None req-16e504d0-81d5-46ba-b76c-e81f03db9c1b tempest-ListServerFiltersTestJSON-1317128283 tempest-ListServerFiltersTestJSON-1317128283-project-member] [instance: 3fda5b6d-44b1-412a-9eff-0e8be3c725c1] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1248.481563] env[67015]: DEBUG nova.compute.manager [None req-16e504d0-81d5-46ba-b76c-e81f03db9c1b tempest-ListServerFiltersTestJSON-1317128283 tempest-ListServerFiltersTestJSON-1317128283-project-member] [instance: 3fda5b6d-44b1-412a-9eff-0e8be3c725c1] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1248.500356] env[67015]: DEBUG oslo_concurrency.lockutils [None req-16e504d0-81d5-46ba-b76c-e81f03db9c1b tempest-ListServerFiltersTestJSON-1317128283 tempest-ListServerFiltersTestJSON-1317128283-project-member] Lock "3fda5b6d-44b1-412a-9eff-0e8be3c725c1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.099s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1248.508974] env[67015]: DEBUG nova.compute.manager [None req-cd6196ed-5ab7-4b98-8652-b2ed5d1cd70e tempest-ListImageFiltersTestJSON-1257660884 tempest-ListImageFiltersTestJSON-1257660884-project-member] [instance: 2698cd39-5b23-48b8-ae60-48f7576c1546] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1248.532393] env[67015]: DEBUG nova.compute.manager [None req-cd6196ed-5ab7-4b98-8652-b2ed5d1cd70e tempest-ListImageFiltersTestJSON-1257660884 tempest-ListImageFiltersTestJSON-1257660884-project-member] [instance: 2698cd39-5b23-48b8-ae60-48f7576c1546] Instance disappeared before build. 
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1248.552716] env[67015]: DEBUG oslo_concurrency.lockutils [None req-cd6196ed-5ab7-4b98-8652-b2ed5d1cd70e tempest-ListImageFiltersTestJSON-1257660884 tempest-ListImageFiltersTestJSON-1257660884-project-member] Lock "2698cd39-5b23-48b8-ae60-48f7576c1546" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.997s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1248.561479] env[67015]: DEBUG nova.compute.manager [None req-30872710-9e00-43a6-9de8-e712d8c5923b tempest-ListServerFiltersTestJSON-1317128283 tempest-ListServerFiltersTestJSON-1317128283-project-member] [instance: 2b1b82aa-9f54-4829-98bf-011f6289a534] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1248.583537] env[67015]: DEBUG nova.compute.manager [None req-30872710-9e00-43a6-9de8-e712d8c5923b tempest-ListServerFiltersTestJSON-1317128283 tempest-ListServerFiltersTestJSON-1317128283-project-member] [instance: 2b1b82aa-9f54-4829-98bf-011f6289a534] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1248.606797] env[67015]: DEBUG oslo_concurrency.lockutils [None req-30872710-9e00-43a6-9de8-e712d8c5923b tempest-ListServerFiltersTestJSON-1317128283 tempest-ListServerFiltersTestJSON-1317128283-project-member] Lock "2b1b82aa-9f54-4829-98bf-011f6289a534" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.417s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1248.615753] env[67015]: DEBUG nova.compute.manager [None req-05235453-ab08-4e7a-806a-954b35f68b31 tempest-ListImageFiltersTestJSON-1257660884 tempest-ListImageFiltersTestJSON-1257660884-project-member] [instance: 46a76bbb-28ff-4b71-aa4e-f946ef586b64] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1248.642699] env[67015]: DEBUG nova.compute.manager [None req-05235453-ab08-4e7a-806a-954b35f68b31 tempest-ListImageFiltersTestJSON-1257660884 tempest-ListImageFiltersTestJSON-1257660884-project-member] [instance: 46a76bbb-28ff-4b71-aa4e-f946ef586b64] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1248.665967] env[67015]: DEBUG oslo_concurrency.lockutils [None req-05235453-ab08-4e7a-806a-954b35f68b31 tempest-ListImageFiltersTestJSON-1257660884 tempest-ListImageFiltersTestJSON-1257660884-project-member] Lock "46a76bbb-28ff-4b71-aa4e-f946ef586b64" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.474s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1248.684540] env[67015]: DEBUG nova.compute.manager [None req-f0d2db5d-6d0f-4fc0-bf25-ecddd6101337 tempest-AttachVolumeNegativeTest-173917919 tempest-AttachVolumeNegativeTest-173917919-project-member] [instance: a51672b6-f918-4ba3-9c55-af2edb3ec693] Starting instance... 
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1248.712534] env[67015]: DEBUG nova.compute.manager [None req-f0d2db5d-6d0f-4fc0-bf25-ecddd6101337 tempest-AttachVolumeNegativeTest-173917919 tempest-AttachVolumeNegativeTest-173917919-project-member] [instance: a51672b6-f918-4ba3-9c55-af2edb3ec693] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1248.733697] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f0d2db5d-6d0f-4fc0-bf25-ecddd6101337 tempest-AttachVolumeNegativeTest-173917919 tempest-AttachVolumeNegativeTest-173917919-project-member] Lock "a51672b6-f918-4ba3-9c55-af2edb3ec693" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.384s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1248.744131] env[67015]: DEBUG nova.compute.manager [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1248.800981] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1248.802031] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1248.803883] env[67015]: INFO nova.compute.claims [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1249.039879] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b41df8f-963b-4c7b-8758-64c0c4d145bc {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.047538] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd966949-b7b9-43d1-a0a9-430d632fddce {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.076298] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4c9ca9-32ff-400e-8503-da92d6cbe216 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.083024] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9642d0a8-c04d-451d-9a0b-715a9257f4bc {{(pid=67015) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.095409] env[67015]: DEBUG nova.compute.provider_tree [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1249.106282] env[67015]: DEBUG nova.scheduler.client.report [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1249.120867] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.319s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1249.121380] env[67015]: DEBUG nova.compute.manager [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1249.153682] env[67015]: DEBUG nova.compute.utils [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1249.154836] env[67015]: DEBUG nova.compute.manager [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1249.154836] env[67015]: DEBUG nova.network.neutron [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1249.163180] env[67015]: DEBUG nova.compute.manager [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Start building block device mappings for instance. 
{{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1249.232054] env[67015]: DEBUG nova.compute.manager [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Start spawning the instance on the hypervisor. {{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1249.235509] env[67015]: DEBUG nova.policy [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5f4db804a18c477b8654f84acd95612c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0ab8c68f6d18484886f01ef94496fb0b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 1249.256890] env[67015]: DEBUG nova.virt.hardware [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1249.257142] env[67015]: DEBUG nova.virt.hardware [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1249.257324] env[67015]: DEBUG nova.virt.hardware [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1249.257525] env[67015]: DEBUG nova.virt.hardware [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1249.257671] env[67015]: DEBUG nova.virt.hardware [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1249.257818] env[67015]: DEBUG nova.virt.hardware [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1249.258039] env[67015]: DEBUG nova.virt.hardware [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1249.258204] env[67015]: DEBUG nova.virt.hardware [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1249.258372] env[67015]: DEBUG nova.virt.hardware [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1249.258535] env[67015]: DEBUG nova.virt.hardware [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1249.258709] env[67015]: DEBUG nova.virt.hardware [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1249.259553] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5aab19d-16fa-4dd2-a07f-507db5097d61 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.268810] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74bd80c2-3aad-45c5-b6e5-7060156e1907 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.509588] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1249.537602] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1249.537798] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task 
ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1249.537957] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1249.538114] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1249.546268] env[67015]: DEBUG nova.network.neutron [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Successfully created port: 29b62133-e8e5-4bf4-be79-0ac5e17c00d5 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1250.514026] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1250.525884] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1250.526135] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1250.526309] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1250.526469] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1250.527752] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b05e5e0d-86ff-4d43-b27e-3968065b4d21 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.537666] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-994c9179-6e55-435c-9ee6-2448d9280af0 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.558041] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1503fe2b-00d8-482b-b412-5bca262a06ba {{(pid=67015) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.563599] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87541d4b-66a5-4109-964e-50846fb5101f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.593150] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181016MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1250.593567] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1250.593666] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1250.668084] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 96feb18e-14ee-40cf-bd5d-89a4e773c797 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1250.668297] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 30f91210-0318-4912-808b-843c2cd04ea1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1250.668482] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 843278e1-7d76-4f50-8170-9e335d29326e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1250.668634] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance db3de804-63b7-4887-b752-282e70e0f20e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1250.668797] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8c919afe-37b6-47f0-b939-d9df5800d7ee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1250.668953] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 437b21d5-932d-4216-b7f7-17c6eab2665f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1250.669122] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1250.669287] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 7b744243-c7e5-4253-9273-9d7f84772d96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1250.669428] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 98f18180-bd1c-492d-9fbe-4bf306aca4b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1250.669587] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 7a5c2a57-b28d-45e0-ab7b-5a649758b69b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1250.681496] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance c7d8b42d-6455-4489-9f62-8ab9f85e7f76 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1250.692407] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1250.702072] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance daa757b3-3ad6-477f-a7e9-b81a863e9a8c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1250.716853] env[67015]: DEBUG nova.network.neutron [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Successfully updated port: 29b62133-e8e5-4bf4-be79-0ac5e17c00d5 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1250.718630] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 01c3904e-b014-4f3a-8647-ba5fb786d960 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1250.733022] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6d104e2f-9924-4094-823d-a78c21acfc7b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1250.734371] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Acquiring lock "refresh_cache-7a5c2a57-b28d-45e0-ab7b-5a649758b69b" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1250.734632] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Acquired lock "refresh_cache-7a5c2a57-b28d-45e0-ab7b-5a649758b69b" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.734874] env[67015]: DEBUG nova.network.neutron [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1250.745059] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 55346ba0-b93e-489f-8b89-640b7e33e384 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1250.745620] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1250.747249] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1250.758955] env[67015]: DEBUG nova.compute.manager [req-9b5101a0-e5a0-4616-95c5-4e3d040a692c req-6f86fa4a-ad67-425d-853e-b1001ba958a7 service nova] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Received event network-vif-plugged-29b62133-e8e5-4bf4-be79-0ac5e17c00d5 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1250.759186] env[67015]: DEBUG oslo_concurrency.lockutils [req-9b5101a0-e5a0-4616-95c5-4e3d040a692c req-6f86fa4a-ad67-425d-853e-b1001ba958a7 service nova] Acquiring lock "7a5c2a57-b28d-45e0-ab7b-5a649758b69b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1250.759397] env[67015]: DEBUG oslo_concurrency.lockutils [req-9b5101a0-e5a0-4616-95c5-4e3d040a692c req-6f86fa4a-ad67-425d-853e-b1001ba958a7 service nova] Lock "7a5c2a57-b28d-45e0-ab7b-5a649758b69b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1250.759565] env[67015]: DEBUG oslo_concurrency.lockutils [req-9b5101a0-e5a0-4616-95c5-4e3d040a692c req-6f86fa4a-ad67-425d-853e-b1001ba958a7 service nova] Lock "7a5c2a57-b28d-45e0-ab7b-5a649758b69b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1250.759760] env[67015]: DEBUG nova.compute.manager [req-9b5101a0-e5a0-4616-95c5-4e3d040a692c req-6f86fa4a-ad67-425d-853e-b1001ba958a7 service nova] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] No waiting events found dispatching network-vif-plugged-29b62133-e8e5-4bf4-be79-0ac5e17c00d5 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1250.760090] env[67015]: WARNING nova.compute.manager [req-9b5101a0-e5a0-4616-95c5-4e3d040a692c req-6f86fa4a-ad67-425d-853e-b1001ba958a7 service nova] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Received unexpected event network-vif-plugged-29b62133-e8e5-4bf4-be79-0ac5e17c00d5 for instance with vm_state building and task_state spawning. [ 1250.811397] env[67015]: DEBUG nova.network.neutron [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Instance cache missing network info. 
{{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1250.946449] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72be7dcd-2a28-4261-80db-0d135901108c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.954174] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3396b49f-0f40-4402-accc-d6514fd1133b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.984527] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f33553-ab97-40e4-bbee-eb8a563e4d70 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.991879] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e28983-7ca5-4eee-ad32-eef10efeec4f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.004627] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1251.012629] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1251.019642] env[67015]: DEBUG nova.network.neutron [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Updating instance_info_cache with network_info: [{"id": "29b62133-e8e5-4bf4-be79-0ac5e17c00d5", "address": "fa:16:3e:8e:8d:b3", "network": {"id": "9d012d05-e175-4dc6-b7ac-fcb02c5b5e69", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-968276058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ab8c68f6d18484886f01ef94496fb0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29b62133-e8", "ovs_interfaceid": "29b62133-e8e5-4bf4-be79-0ac5e17c00d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1251.030430] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1251.030697] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.437s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1251.031453] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Releasing lock "refresh_cache-7a5c2a57-b28d-45e0-ab7b-5a649758b69b" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1251.031717] env[67015]: DEBUG nova.compute.manager [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Instance network_info: |[{"id": "29b62133-e8e5-4bf4-be79-0ac5e17c00d5", "address": "fa:16:3e:8e:8d:b3", "network": {"id": "9d012d05-e175-4dc6-b7ac-fcb02c5b5e69", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-968276058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ab8c68f6d18484886f01ef94496fb0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29b62133-e8", "ovs_interfaceid": "29b62133-e8e5-4bf4-be79-0ac5e17c00d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1251.032108] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:8d:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3cebc48c-6a77-46bf-9c12-ac130e4d7d76', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '29b62133-e8e5-4bf4-be79-0ac5e17c00d5', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1251.039414] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Creating folder: Project (0ab8c68f6d18484886f01ef94496fb0b). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1251.039867] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-96caadd0-f684-461d-9841-7aa5a893bbf8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.049317] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Created folder: Project (0ab8c68f6d18484886f01ef94496fb0b) in parent group-v623108. [ 1251.049507] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Creating folder: Instances. Parent ref: group-v623187. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1251.049728] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-98518956-7014-44a0-948c-c648bd9a1fdf {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.058079] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Created folder: Instances in parent group-v623187. [ 1251.058312] env[67015]: DEBUG oslo.service.loopingcall [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1251.058496] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1251.058683] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-db30ccf7-05a7-4252-b047-04eda228a8c1 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.077164] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1251.077164] env[67015]: value = "task-3114492" [ 1251.077164] env[67015]: _type = "Task" [ 1251.077164] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.084009] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114492, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.586908] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114492, 'name': CreateVM_Task, 'duration_secs': 0.295611} completed successfully. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.587198] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1251.587809] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1251.587977] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1251.588308] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1251.588560] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8abe4476-2917-4587-a147-bff14d83971d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.593044] env[67015]: DEBUG oslo_vmware.api [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Waiting for the task: (returnval){ [ 1251.593044] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52edbd0d-fd23-afdc-71ad-320ed536b5f7" [ 1251.593044] env[67015]: _type = "Task" [ 1251.593044] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.600575] env[67015]: DEBUG oslo_vmware.api [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52edbd0d-fd23-afdc-71ad-320ed536b5f7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.103531] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1252.103748] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1252.103956] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1252.838314] env[67015]: DEBUG nova.compute.manager [req-44ed704a-3077-4d3c-a099-f60a4e92640b req-16950222-b1a4-4e89-b937-a9d7dc0d7e0e service nova] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Received event network-changed-29b62133-e8e5-4bf4-be79-0ac5e17c00d5 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1252.838627] env[67015]: DEBUG nova.compute.manager [req-44ed704a-3077-4d3c-a099-f60a4e92640b req-16950222-b1a4-4e89-b937-a9d7dc0d7e0e service nova] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Refreshing instance network info cache due to event network-changed-29b62133-e8e5-4bf4-be79-0ac5e17c00d5. {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1252.838778] env[67015]: DEBUG oslo_concurrency.lockutils [req-44ed704a-3077-4d3c-a099-f60a4e92640b req-16950222-b1a4-4e89-b937-a9d7dc0d7e0e service nova] Acquiring lock "refresh_cache-7a5c2a57-b28d-45e0-ab7b-5a649758b69b" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1252.838923] env[67015]: DEBUG oslo_concurrency.lockutils [req-44ed704a-3077-4d3c-a099-f60a4e92640b req-16950222-b1a4-4e89-b937-a9d7dc0d7e0e service nova] Acquired lock "refresh_cache-7a5c2a57-b28d-45e0-ab7b-5a649758b69b" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1252.839103] env[67015]: DEBUG nova.network.neutron [req-44ed704a-3077-4d3c-a099-f60a4e92640b req-16950222-b1a4-4e89-b937-a9d7dc0d7e0e service nova] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Refreshing network info cache for port 29b62133-e8e5-4bf4-be79-0ac5e17c00d5 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1253.180162] env[67015]: DEBUG nova.network.neutron [req-44ed704a-3077-4d3c-a099-f60a4e92640b req-16950222-b1a4-4e89-b937-a9d7dc0d7e0e service nova] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Updated VIF entry in instance network info cache for port 29b62133-e8e5-4bf4-be79-0ac5e17c00d5. 
{{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1253.180550] env[67015]: DEBUG nova.network.neutron [req-44ed704a-3077-4d3c-a099-f60a4e92640b req-16950222-b1a4-4e89-b937-a9d7dc0d7e0e service nova] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Updating instance_info_cache with network_info: [{"id": "29b62133-e8e5-4bf4-be79-0ac5e17c00d5", "address": "fa:16:3e:8e:8d:b3", "network": {"id": "9d012d05-e175-4dc6-b7ac-fcb02c5b5e69", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-968276058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ab8c68f6d18484886f01ef94496fb0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29b62133-e8", "ovs_interfaceid": "29b62133-e8e5-4bf4-be79-0ac5e17c00d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1253.189728] env[67015]: DEBUG oslo_concurrency.lockutils [req-44ed704a-3077-4d3c-a099-f60a4e92640b req-16950222-b1a4-4e89-b937-a9d7dc0d7e0e service nova] Releasing lock "refresh_cache-7a5c2a57-b28d-45e0-ab7b-5a649758b69b" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1254.030949] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1254.031313] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1254.031313] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1254.053620] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1254.053794] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Skipping network cache update for instance because it is Building. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1254.053917] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1254.054036] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1254.054169] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1254.054297] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1254.054422] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1254.054544] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1254.054665] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1254.054786] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1254.054906] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1274.749829] env[67015]: DEBUG oslo_concurrency.lockutils [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquiring lock "944728f0-7db6-4cca-a51c-7acb5998cb12" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1274.750141] env[67015]: DEBUG oslo_concurrency.lockutils [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Lock "944728f0-7db6-4cca-a51c-7acb5998cb12" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1274.773367] env[67015]: DEBUG oslo_concurrency.lockutils [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquiring lock "b7b12357-09c4-402f-bf1c-f8872d86d17b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1274.773589] env[67015]: DEBUG oslo_concurrency.lockutils [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Lock "b7b12357-09c4-402f-bf1c-f8872d86d17b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1275.092986] env[67015]: DEBUG oslo_concurrency.lockutils [None req-02260bb6-0104-4b8a-af60-61e7b1c46819 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Acquiring lock "7a5c2a57-b28d-45e0-ab7b-5a649758b69b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1295.719210] env[67015]: WARNING oslo_vmware.rw_handles [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1295.719210] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1295.719210] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1295.719210] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1295.719210] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1295.719210] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 1295.719210] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1295.719210] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1295.719210] 
env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1295.719210] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1295.719210] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1295.719210] env[67015]: ERROR oslo_vmware.rw_handles [ 1295.719865] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/dd6f3d90-027c-435d-b478-404e423f6830/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1295.721994] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1295.722281] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Copying Virtual Disk [datastore2] vmware_temp/dd6f3d90-027c-435d-b478-404e423f6830/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/dd6f3d90-027c-435d-b478-404e423f6830/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1295.722584] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a94edd2a-823e-4e3d-a79d-580ab9ebad47 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.730895] env[67015]: DEBUG oslo_vmware.api [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Waiting for the task: (returnval){ [ 1295.730895] env[67015]: value = "task-3114493" [ 1295.730895] env[67015]: _type = "Task" [ 1295.730895] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.739257] env[67015]: DEBUG oslo_vmware.api [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Task: {'id': task-3114493, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.241222] env[67015]: DEBUG oslo_vmware.exceptions [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Fault InvalidArgument not matched. 
{{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1296.241507] env[67015]: DEBUG oslo_concurrency.lockutils [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1296.242111] env[67015]: ERROR nova.compute.manager [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1296.242111] env[67015]: Faults: ['InvalidArgument'] [ 1296.242111] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Traceback (most recent call last): [ 1296.242111] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1296.242111] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] yield resources [ 1296.242111] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1296.242111] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] self.driver.spawn(context, instance, image_meta, [ 1296.242111] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1296.242111] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1296.242111] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1296.242111] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] self._fetch_image_if_missing(context, vi) [ 1296.242111] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1296.243625] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] image_cache(vi, tmp_image_ds_loc) [ 1296.243625] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1296.243625] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] vm_util.copy_virtual_disk( [ 1296.243625] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1296.243625] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] session._wait_for_task(vmdk_copy_task) [ 1296.243625] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1296.243625] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] return self.wait_for_task(task_ref) [ 1296.243625] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1296.243625] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] return evt.wait() [ 1296.243625] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1296.243625] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] result = hub.switch() [ 1296.243625] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1296.243625] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] return self.greenlet.switch() [ 1296.243939] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1296.243939] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] self.f(*self.args, **self.kw) [ 1296.243939] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1296.243939] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] raise exceptions.translate_fault(task_info.error) [ 1296.243939] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1296.243939] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Faults: ['InvalidArgument'] [ 1296.243939] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] [ 1296.243939] env[67015]: INFO nova.compute.manager [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Terminating instance [ 1296.244275] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.244489] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1296.244732] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-6db78849-1c58-4e1b-842b-f6ace65c3d71 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.247134] env[67015]: DEBUG nova.compute.manager [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1296.247332] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1296.248123] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd6f1951-657c-4372-a36e-b1e5d7acb12d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.255131] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1296.255370] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-09757243-25f8-4363-8397-83002ae88d0f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.257690] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1296.257876] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1296.258893] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fc553c7-0a82-4d24-9259-55d94368c5d4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.263482] env[67015]: DEBUG oslo_vmware.api [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Waiting for the task: (returnval){ [ 1296.263482] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]526b1aab-1794-a97f-b6c0-5b895ac4482a" [ 1296.263482] env[67015]: _type = "Task" [ 1296.263482] env[67015]: } to complete. 
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.270841] env[67015]: DEBUG oslo_vmware.api [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]526b1aab-1794-a97f-b6c0-5b895ac4482a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.324805] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1296.325174] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1296.325473] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Deleting the datastore file [datastore2] 96feb18e-14ee-40cf-bd5d-89a4e773c797 {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1296.325854] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fb26bd4b-1573-4eb0-b330-6b172b38c68d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.333712] env[67015]: DEBUG oslo_vmware.api [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Waiting for the task: (returnval){ [ 1296.333712] env[67015]: value = "task-3114495" [ 1296.333712] env[67015]: _type = "Task" [ 1296.333712] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.345462] env[67015]: DEBUG oslo_vmware.api [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Task: {'id': task-3114495, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.773922] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1296.774337] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Creating directory with path [datastore2] vmware_temp/6beaa687-a99d-4649-b892-c08f6eb2a6eb/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1296.774453] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a502248-77ba-40b8-89d7-b480cda0ef34 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.787021] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Created directory with path [datastore2] vmware_temp/6beaa687-a99d-4649-b892-c08f6eb2a6eb/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1296.787245] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Fetch image to [datastore2] vmware_temp/6beaa687-a99d-4649-b892-c08f6eb2a6eb/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1296.787394] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/6beaa687-a99d-4649-b892-c08f6eb2a6eb/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1296.788215] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b09982-8cba-464a-b4d3-5814bb027389 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.794884] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-361012f6-49f2-4138-b55f-984048998cb6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.804458] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76275a8b-7fe2-4aa8-8ff5-d4b849bc26a2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.840633] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae9038b-6d17-4e8a-a4e5-0a6ff197dc94 
{{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.849351] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-69ae074e-a050-49f2-bbe9-0b5be625dc17 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.851193] env[67015]: DEBUG oslo_vmware.api [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Task: {'id': task-3114495, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.095461} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.851441] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1296.851622] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1296.851793] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1296.851965] env[67015]: INFO nova.compute.manager [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1296.854107] env[67015]: DEBUG nova.compute.claims [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1296.854282] env[67015]: DEBUG oslo_concurrency.lockutils [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1296.854494] env[67015]: DEBUG oslo_concurrency.lockutils [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1296.874145] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1296.933966] env[67015]: DEBUG oslo_vmware.rw_handles [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6beaa687-a99d-4649-b892-c08f6eb2a6eb/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1296.996532] env[67015]: DEBUG oslo_vmware.rw_handles [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1296.996733] env[67015]: DEBUG oslo_vmware.rw_handles [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6beaa687-a99d-4649-b892-c08f6eb2a6eb/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1297.181879] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2e6c3a3-7881-4fb2-b5fa-28cf72943d5f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.189500] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab6540d-3c87-43eb-b2c4-5761974a639a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.219856] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53467ce9-8f9f-4aea-a78e-159fc3f2f590 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.226943] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a8098f2-820a-4522-890c-602e5fb62593 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.239786] env[67015]: DEBUG nova.compute.provider_tree [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1297.248750] env[67015]: DEBUG nova.scheduler.client.report [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1297.264564] env[67015]: DEBUG oslo_concurrency.lockutils [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.410s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1297.265179] env[67015]: ERROR nova.compute.manager [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1297.265179] env[67015]: Faults: ['InvalidArgument'] [ 1297.265179] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Traceback (most recent call last): [ 1297.265179] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] File "/opt/stack/nova/nova/compute/manager.py", line 2632, 
in _build_and_run_instance [ 1297.265179] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] self.driver.spawn(context, instance, image_meta, [ 1297.265179] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1297.265179] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1297.265179] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1297.265179] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] self._fetch_image_if_missing(context, vi) [ 1297.265179] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1297.265179] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] image_cache(vi, tmp_image_ds_loc) [ 1297.265179] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1297.265553] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] vm_util.copy_virtual_disk( [ 1297.265553] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1297.265553] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] session._wait_for_task(vmdk_copy_task) [ 1297.265553] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1297.265553] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] return self.wait_for_task(task_ref) [ 1297.265553] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1297.265553] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] return evt.wait() [ 1297.265553] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1297.265553] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] result = hub.switch() [ 1297.265553] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1297.265553] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] return self.greenlet.switch() [ 1297.265553] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1297.265553] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] self.f(*self.args, **self.kw) [ 1297.266114] env[67015]: ERROR nova.compute.manager [instance: 
96feb18e-14ee-40cf-bd5d-89a4e773c797] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1297.266114] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] raise exceptions.translate_fault(task_info.error) [ 1297.266114] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1297.266114] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Faults: ['InvalidArgument'] [ 1297.266114] env[67015]: ERROR nova.compute.manager [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] [ 1297.266114] env[67015]: DEBUG nova.compute.utils [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1297.267413] env[67015]: DEBUG nova.compute.manager [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Build of instance 96feb18e-14ee-40cf-bd5d-89a4e773c797 was re-scheduled: A specified parameter was not correct: fileType [ 1297.267413] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1297.267865] env[67015]: DEBUG nova.compute.manager [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1297.268050] env[67015]: DEBUG nova.compute.manager [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1297.268277] env[67015]: DEBUG nova.compute.manager [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1297.268454] env[67015]: DEBUG nova.network.neutron [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1297.972934] env[67015]: DEBUG nova.network.neutron [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1297.988250] env[67015]: INFO nova.compute.manager [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Took 0.72 seconds to deallocate network for instance. [ 1298.093110] env[67015]: INFO nova.scheduler.client.report [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Deleted allocations for instance 96feb18e-14ee-40cf-bd5d-89a4e773c797 [ 1298.112591] env[67015]: DEBUG oslo_concurrency.lockutils [None req-58c3221c-4d83-4214-a209-4fd799088345 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Lock "96feb18e-14ee-40cf-bd5d-89a4e773c797" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 618.312s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.114204] env[67015]: DEBUG oslo_concurrency.lockutils [None req-aeb783d2-3fe6-4971-a64b-807197715d80 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Lock "96feb18e-14ee-40cf-bd5d-89a4e773c797" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 421.155s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.114204] env[67015]: DEBUG oslo_concurrency.lockutils [None req-aeb783d2-3fe6-4971-a64b-807197715d80 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Acquiring lock "96feb18e-14ee-40cf-bd5d-89a4e773c797-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1298.114367] env[67015]: DEBUG oslo_concurrency.lockutils [None req-aeb783d2-3fe6-4971-a64b-807197715d80 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Lock 
"96feb18e-14ee-40cf-bd5d-89a4e773c797-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.114413] env[67015]: DEBUG oslo_concurrency.lockutils [None req-aeb783d2-3fe6-4971-a64b-807197715d80 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Lock "96feb18e-14ee-40cf-bd5d-89a4e773c797-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.116395] env[67015]: INFO nova.compute.manager [None req-aeb783d2-3fe6-4971-a64b-807197715d80 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Terminating instance [ 1298.118412] env[67015]: DEBUG nova.compute.manager [None req-aeb783d2-3fe6-4971-a64b-807197715d80 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1298.118604] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb783d2-3fe6-4971-a64b-807197715d80 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1298.119079] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1fa9b646-868b-4fce-93fb-0508ee1e4199 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.125682] env[67015]: DEBUG nova.compute.manager [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1298.130824] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be1722c3-365c-408e-8873-3eb0af01b827 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.163949] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-aeb783d2-3fe6-4971-a64b-807197715d80 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 96feb18e-14ee-40cf-bd5d-89a4e773c797 could not be found. 
[ 1298.164210] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb783d2-3fe6-4971-a64b-807197715d80 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1298.165030] env[67015]: INFO nova.compute.manager [None req-aeb783d2-3fe6-4971-a64b-807197715d80 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1298.165030] env[67015]: DEBUG oslo.service.loopingcall [None req-aeb783d2-3fe6-4971-a64b-807197715d80 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1298.166962] env[67015]: DEBUG nova.compute.manager [-] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1298.167074] env[67015]: DEBUG nova.network.neutron [-] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1298.181277] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1298.181522] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.183015] env[67015]: INFO nova.compute.claims [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1298.200335] env[67015]: DEBUG nova.network.neutron [-] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1298.215711] env[67015]: INFO nova.compute.manager [-] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] Took 0.05 seconds to deallocate network for instance. 
[ 1298.309024] env[67015]: DEBUG oslo_concurrency.lockutils [None req-aeb783d2-3fe6-4971-a64b-807197715d80 tempest-AttachInterfacesUnderV243Test-1753202259 tempest-AttachInterfacesUnderV243Test-1753202259-project-member] Lock "96feb18e-14ee-40cf-bd5d-89a4e773c797" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.195s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.309690] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "96feb18e-14ee-40cf-bd5d-89a4e773c797" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 127.495s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.309883] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 96feb18e-14ee-40cf-bd5d-89a4e773c797] During sync_power_state the instance has a pending task (deleting). Skip. [ 1298.310072] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "96feb18e-14ee-40cf-bd5d-89a4e773c797" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.443526] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f2082e-6ea0-41cc-95d0-70eb118e1e65 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.451158] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47df2453-8f22-4798-83ca-0ed1589605eb {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.481650] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05e0a268-f29c-422c-b203-87f0a1a53f62 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.488552] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1889fdea-9c9a-4f40-8992-ae0bfec87e02 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.501191] env[67015]: DEBUG nova.compute.provider_tree [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1298.512366] env[67015]: DEBUG nova.scheduler.client.report [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1298.525804] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.344s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.526286] env[67015]: DEBUG nova.compute.manager [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1298.568027] env[67015]: DEBUG nova.compute.utils [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1298.569598] env[67015]: DEBUG nova.compute.manager [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1298.569598] env[67015]: DEBUG nova.network.neutron [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1298.578354] env[67015]: DEBUG nova.compute.manager [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1298.625312] env[67015]: DEBUG nova.policy [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '585aaf69bf474881ba8f6f13b7fba1b2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '54ce93521ed148d397c6dd8905557b34', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 1298.640175] env[67015]: DEBUG nova.compute.manager [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Start spawning the instance on the hypervisor. 
{{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1298.667608] env[67015]: DEBUG nova.virt.hardware [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1298.667890] env[67015]: DEBUG nova.virt.hardware [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1298.668067] env[67015]: DEBUG nova.virt.hardware [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1298.668253] env[67015]: DEBUG nova.virt.hardware [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1298.668418] env[67015]: DEBUG nova.virt.hardware [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1298.668548] env[67015]: DEBUG nova.virt.hardware [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1298.668779] env[67015]: DEBUG nova.virt.hardware [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1298.668958] env[67015]: DEBUG nova.virt.hardware [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1298.669147] env[67015]: DEBUG nova.virt.hardware [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Got 1 
possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1298.669313] env[67015]: DEBUG nova.virt.hardware [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1298.669482] env[67015]: DEBUG nova.virt.hardware [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1298.670351] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4496079b-9efa-4afa-aea6-48bd5e56ea17 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.678111] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a1bd5ea-5330-4d4c-bdb8-d77efcc3b646 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.930809] env[67015]: DEBUG nova.network.neutron [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Successfully created port: 3deefd62-a511-4e04-a908-a57c19f859c5 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1299.877470] env[67015]: DEBUG nova.network.neutron [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Successfully updated port: 3deefd62-a511-4e04-a908-a57c19f859c5 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1299.888658] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquiring lock "refresh_cache-c7d8b42d-6455-4489-9f62-8ab9f85e7f76" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1299.888817] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquired lock "refresh_cache-c7d8b42d-6455-4489-9f62-8ab9f85e7f76" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1299.888973] env[67015]: DEBUG nova.network.neutron [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1299.933868] env[67015]: DEBUG nova.network.neutron [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Instance cache missing network info. 
{{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1300.090369] env[67015]: DEBUG nova.network.neutron [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Updating instance_info_cache with network_info: [{"id": "3deefd62-a511-4e04-a908-a57c19f859c5", "address": "fa:16:3e:44:77:fb", "network": {"id": "a883b9e1-0130-4802-81b3-5c676e3f49c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-4227508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54ce93521ed148d397c6dd8905557b34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3deefd62-a5", "ovs_interfaceid": "3deefd62-a511-4e04-a908-a57c19f859c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1300.101755] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Releasing lock "refresh_cache-c7d8b42d-6455-4489-9f62-8ab9f85e7f76" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1300.102072] env[67015]: DEBUG nova.compute.manager [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Instance network_info: |[{"id": "3deefd62-a511-4e04-a908-a57c19f859c5", "address": "fa:16:3e:44:77:fb", "network": {"id": "a883b9e1-0130-4802-81b3-5c676e3f49c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-4227508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54ce93521ed148d397c6dd8905557b34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3deefd62-a5", "ovs_interfaceid": "3deefd62-a511-4e04-a908-a57c19f859c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1300.102487] env[67015]: DEBUG 
nova.virt.vmwareapi.vmops [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:77:fb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b98c49ac-0eb7-4311-aa8f-60581b2ce706', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3deefd62-a511-4e04-a908-a57c19f859c5', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1300.109803] env[67015]: DEBUG oslo.service.loopingcall [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1300.110296] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1300.110519] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1b554368-3743-42fa-a579-66091cba4b9e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.130079] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1300.130079] env[67015]: value = "task-3114496" [ 1300.130079] env[67015]: _type = "Task" [ 1300.130079] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.137118] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114496, 'name': CreateVM_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.214747] env[67015]: DEBUG nova.compute.manager [req-a97ae92e-5a75-4a13-a6bc-2d984871f5ea req-a93eb03a-94d7-45ce-8505-8ab1098d4c1c service nova] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Received event network-vif-plugged-3deefd62-a511-4e04-a908-a57c19f859c5 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1300.214858] env[67015]: DEBUG oslo_concurrency.lockutils [req-a97ae92e-5a75-4a13-a6bc-2d984871f5ea req-a93eb03a-94d7-45ce-8505-8ab1098d4c1c service nova] Acquiring lock "c7d8b42d-6455-4489-9f62-8ab9f85e7f76-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1300.215157] env[67015]: DEBUG oslo_concurrency.lockutils [req-a97ae92e-5a75-4a13-a6bc-2d984871f5ea req-a93eb03a-94d7-45ce-8505-8ab1098d4c1c service nova] Lock "c7d8b42d-6455-4489-9f62-8ab9f85e7f76-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1300.215306] env[67015]: DEBUG oslo_concurrency.lockutils [req-a97ae92e-5a75-4a13-a6bc-2d984871f5ea req-a93eb03a-94d7-45ce-8505-8ab1098d4c1c service nova] Lock "c7d8b42d-6455-4489-9f62-8ab9f85e7f76-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1300.215477] env[67015]: DEBUG nova.compute.manager [req-a97ae92e-5a75-4a13-a6bc-2d984871f5ea req-a93eb03a-94d7-45ce-8505-8ab1098d4c1c service nova] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] No waiting events found dispatching network-vif-plugged-3deefd62-a511-4e04-a908-a57c19f859c5 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1300.215852] env[67015]: WARNING nova.compute.manager [req-a97ae92e-5a75-4a13-a6bc-2d984871f5ea req-a93eb03a-94d7-45ce-8505-8ab1098d4c1c service nova] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Received unexpected event network-vif-plugged-3deefd62-a511-4e04-a908-a57c19f859c5 for instance with vm_state building and task_state spawning. [ 1300.216094] env[67015]: DEBUG nova.compute.manager [req-a97ae92e-5a75-4a13-a6bc-2d984871f5ea req-a93eb03a-94d7-45ce-8505-8ab1098d4c1c service nova] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Received event network-changed-3deefd62-a511-4e04-a908-a57c19f859c5 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1300.216300] env[67015]: DEBUG nova.compute.manager [req-a97ae92e-5a75-4a13-a6bc-2d984871f5ea req-a93eb03a-94d7-45ce-8505-8ab1098d4c1c service nova] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Refreshing instance network info cache due to event network-changed-3deefd62-a511-4e04-a908-a57c19f859c5. 
{{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1300.216531] env[67015]: DEBUG oslo_concurrency.lockutils [req-a97ae92e-5a75-4a13-a6bc-2d984871f5ea req-a93eb03a-94d7-45ce-8505-8ab1098d4c1c service nova] Acquiring lock "refresh_cache-c7d8b42d-6455-4489-9f62-8ab9f85e7f76" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1300.216707] env[67015]: DEBUG oslo_concurrency.lockutils [req-a97ae92e-5a75-4a13-a6bc-2d984871f5ea req-a93eb03a-94d7-45ce-8505-8ab1098d4c1c service nova] Acquired lock "refresh_cache-c7d8b42d-6455-4489-9f62-8ab9f85e7f76" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1300.216882] env[67015]: DEBUG nova.network.neutron [req-a97ae92e-5a75-4a13-a6bc-2d984871f5ea req-a93eb03a-94d7-45ce-8505-8ab1098d4c1c service nova] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Refreshing network info cache for port 3deefd62-a511-4e04-a908-a57c19f859c5 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1300.641784] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114496, 'name': CreateVM_Task, 'duration_secs': 0.287509} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.641784] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1300.641784] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1300.641784] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1300.641784] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1300.642108] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f566792-e97f-42de-879e-66a63b8a3e69 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.646374] env[67015]: DEBUG oslo_vmware.api [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Waiting for the task: (returnval){ [ 1300.646374] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]525fce49-5561-4806-f886-f14ed0ef5a6e" [ 1300.646374] env[67015]: _type = "Task" [ 1300.646374] env[67015]: } to complete. 
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.656111] env[67015]: DEBUG oslo_vmware.api [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]525fce49-5561-4806-f886-f14ed0ef5a6e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.733993] env[67015]: DEBUG nova.network.neutron [req-a97ae92e-5a75-4a13-a6bc-2d984871f5ea req-a93eb03a-94d7-45ce-8505-8ab1098d4c1c service nova] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Updated VIF entry in instance network info cache for port 3deefd62-a511-4e04-a908-a57c19f859c5. {{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1300.734402] env[67015]: DEBUG nova.network.neutron [req-a97ae92e-5a75-4a13-a6bc-2d984871f5ea req-a93eb03a-94d7-45ce-8505-8ab1098d4c1c service nova] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Updating instance_info_cache with network_info: [{"id": "3deefd62-a511-4e04-a908-a57c19f859c5", "address": "fa:16:3e:44:77:fb", "network": {"id": "a883b9e1-0130-4802-81b3-5c676e3f49c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-4227508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54ce93521ed148d397c6dd8905557b34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3deefd62-a5", "ovs_interfaceid": "3deefd62-a511-4e04-a908-a57c19f859c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1300.743750] env[67015]: DEBUG oslo_concurrency.lockutils [req-a97ae92e-5a75-4a13-a6bc-2d984871f5ea req-a93eb03a-94d7-45ce-8505-8ab1098d4c1c service nova] Releasing lock "refresh_cache-c7d8b42d-6455-4489-9f62-8ab9f85e7f76" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1301.157246] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1301.157246] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1301.157246] env[67015]: DEBUG 
oslo_concurrency.lockutils [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1305.513616] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1306.512064] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1306.515027] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1306.905305] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Acquiring lock "41f47735-f679-4b30-8e30-f917dcf4db42" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1306.905522] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Lock "41f47735-f679-4b30-8e30-f917dcf4db42" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1308.513832] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1310.514862] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1310.515354] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1311.515036] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1311.515036] env[67015]: DEBUG nova.compute.manager [None 
req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1311.515036] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1311.526699] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1311.526923] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1311.527106] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1311.527265] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1311.528398] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa7599e6-e1c4-464f-ab6a-22a56328ce77 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.537385] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21e6fd3e-1849-4a2d-8d3b-fe7125ee63a0 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.553976] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee16c633-fff8-4914-a608-01fe85a9dcd4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.560463] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9cc8e86-d413-4fdc-82bd-7f735e65dd28 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.589015] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180995MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1311.589186] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" 
{{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1311.589430] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1311.662139] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 30f91210-0318-4912-808b-843c2cd04ea1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1311.662318] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 843278e1-7d76-4f50-8170-9e335d29326e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1311.662450] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance db3de804-63b7-4887-b752-282e70e0f20e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1311.662577] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8c919afe-37b6-47f0-b939-d9df5800d7ee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1311.662700] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 437b21d5-932d-4216-b7f7-17c6eab2665f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1311.662879] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1311.663016] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 7b744243-c7e5-4253-9273-9d7f84772d96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1311.663192] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 98f18180-bd1c-492d-9fbe-4bf306aca4b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1311.663342] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 7a5c2a57-b28d-45e0-ab7b-5a649758b69b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1311.663500] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance c7d8b42d-6455-4489-9f62-8ab9f85e7f76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1311.674795] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1311.685769] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance daa757b3-3ad6-477f-a7e9-b81a863e9a8c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1311.699578] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 01c3904e-b014-4f3a-8647-ba5fb786d960 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1311.709914] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6d104e2f-9924-4094-823d-a78c21acfc7b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1311.719535] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 55346ba0-b93e-489f-8b89-640b7e33e384 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1311.730741] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 944728f0-7db6-4cca-a51c-7acb5998cb12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1311.741145] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance b7b12357-09c4-402f-bf1c-f8872d86d17b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1311.750895] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 41f47735-f679-4b30-8e30-f917dcf4db42 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1311.751137] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1311.751287] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1311.950917] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30db24f4-5539-4309-b26a-57bc81e3e2e5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.958380] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2190fe1-98d6-4d22-b359-1ce99c2fe78f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.988296] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99255b82-1ca2-4a7b-9cb8-edb29fab0b8a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.995301] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c591aabb-613e-445a-a772-dbab10521130 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.007765] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1312.017389] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1312.030486] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1312.030661] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.441s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1316.030946] env[67015]: DEBUG oslo_service.periodic_task [None 
req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1316.030946] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1316.031244] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1316.055431] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1316.055600] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1316.055747] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1316.055910] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1316.056073] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1316.056211] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1316.056335] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1316.056453] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1316.056571] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Skipping network cache update for instance because it is Building. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1316.056693] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1316.056812] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1345.731206] env[67015]: WARNING oslo_vmware.rw_handles [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1345.731206] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1345.731206] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1345.731206] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1345.731206] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1345.731206] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 1345.731206] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1345.731206] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1345.731206] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1345.731206] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1345.731206] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1345.731206] env[67015]: ERROR oslo_vmware.rw_handles [ 1345.731903] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/6beaa687-a99d-4649-b892-c08f6eb2a6eb/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1345.733556] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1345.733795] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Copying Virtual Disk [datastore2] vmware_temp/6beaa687-a99d-4649-b892-c08f6eb2a6eb/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/6beaa687-a99d-4649-b892-c08f6eb2a6eb/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk 
{{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1345.734090] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7071f919-764a-4269-8bea-032e0c16acf9 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.743101] env[67015]: DEBUG oslo_vmware.api [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Waiting for the task: (returnval){ [ 1345.743101] env[67015]: value = "task-3114497" [ 1345.743101] env[67015]: _type = "Task" [ 1345.743101] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.750899] env[67015]: DEBUG oslo_vmware.api [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Task: {'id': task-3114497, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.256184] env[67015]: DEBUG oslo_vmware.exceptions [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Fault InvalidArgument not matched. {{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1346.256184] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1346.256184] env[67015]: ERROR nova.compute.manager [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1346.256184] env[67015]: Faults: ['InvalidArgument'] [ 1346.256184] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Traceback (most recent call last): [ 1346.256184] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1346.256184] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] yield resources [ 1346.256184] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1346.256184] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] self.driver.spawn(context, instance, image_meta, [ 1346.256536] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1346.256536] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1346.256536] 
env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1346.256536] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] self._fetch_image_if_missing(context, vi) [ 1346.256536] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1346.256536] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] image_cache(vi, tmp_image_ds_loc) [ 1346.256536] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1346.256536] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] vm_util.copy_virtual_disk( [ 1346.256536] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1346.256536] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] session._wait_for_task(vmdk_copy_task) [ 1346.256536] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1346.256536] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] return self.wait_for_task(task_ref) [ 1346.256536] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1346.256849] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] return evt.wait() [ 1346.256849] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1346.256849] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] result = hub.switch() [ 1346.256849] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1346.256849] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] return self.greenlet.switch() [ 1346.256849] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1346.256849] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] self.f(*self.args, **self.kw) [ 1346.256849] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1346.256849] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] raise exceptions.translate_fault(task_info.error) [ 1346.256849] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1346.256849] env[67015]: ERROR nova.compute.manager 
[instance: 30f91210-0318-4912-808b-843c2cd04ea1] Faults: ['InvalidArgument']
[ 1346.256849] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1]
[ 1346.257175] env[67015]: INFO nova.compute.manager [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Terminating instance
[ 1346.257175] env[67015]: DEBUG oslo_concurrency.lockutils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1346.257240] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1346.257454] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4be75b54-7190-4a72-9f90-3257f261afc2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1346.259568] env[67015]: DEBUG nova.compute.manager [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1346.259757] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1346.260497] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c33ec33-fa50-4974-afd5-52147277400b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1346.267996] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1346.268211] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-46085258-bbd0-4234-853c-73f14ede22ee {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1346.270209] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1346.270400] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1346.271300] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17e6888f-4177-4e21-a9fa-9ad3b3cb3e2f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1346.275589] env[67015]: DEBUG oslo_vmware.api [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Waiting for the task: (returnval){
[ 1346.275589] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]5206edd2-db10-e7eb-3463-bda3e88f1393"
[ 1346.275589] env[67015]: _type = "Task"
[ 1346.275589] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1346.282451] env[67015]: DEBUG oslo_vmware.api [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]5206edd2-db10-e7eb-3463-bda3e88f1393, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1346.329868] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1346.330110] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1346.330324] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Deleting the datastore file [datastore2] 30f91210-0318-4912-808b-843c2cd04ea1 {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1346.330704] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2b47f24-7a75-40dc-9d45-91fbe31e79ab {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1346.336463] env[67015]: DEBUG oslo_vmware.api [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Waiting for the task: (returnval){
[ 1346.336463] env[67015]: value = "task-3114499"
[ 1346.336463] env[67015]: _type = "Task"
[ 1346.336463] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1346.343838] env[67015]: DEBUG oslo_vmware.api [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Task: {'id': task-3114499, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1346.785887] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1346.786231] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Creating directory with path [datastore2] vmware_temp/e14fa0e6-38b8-42f7-aea2-a2ff14625dcd/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1346.786408] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0cd440e3-ce7c-4fac-b6c9-56c41fa543d3 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1346.797360] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Created directory with path [datastore2] vmware_temp/e14fa0e6-38b8-42f7-aea2-a2ff14625dcd/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1346.797543] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Fetch image to [datastore2] vmware_temp/e14fa0e6-38b8-42f7-aea2-a2ff14625dcd/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1346.797739] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/e14fa0e6-38b8-42f7-aea2-a2ff14625dcd/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1346.798502] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f40518-1357-4913-95ee-1e147461728f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1346.804797] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03f79405-71ad-4c9a-8ac9-574a45da7c06 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1346.814296] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd80c76b-c29d-4741-af23-58f9cbf1ea34 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1346.846326] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96f9147c-0fd6-4380-a5f2-a33830984aeb {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1346.853531] env[67015]: DEBUG oslo_vmware.api [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Task: {'id': task-3114499, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078305} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1346.854517] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1346.854705] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1346.854883] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1346.855067] env[67015]: INFO nova.compute.manager [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Took 0.60 seconds to destroy the instance on the hypervisor.
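The DeleteDatastoreFile_Task sequence above is the driver's standard poll-and-wait pattern: submit a vCenter task, then poll it until it reports success or error, logging "progress is N%." along the way and "completed successfully" with a duration at the end. A minimal plain-Python sketch of that loop, assuming a hypothetical get_task_info callable in place of the PropertyCollector reads oslo.vmware actually performs:

    import time

    def wait_for_task(get_task_info, task_ref, poll_interval=0.5, timeout=60.0):
        # Poll until the task succeeds, fails, or times out, mirroring the
        # "progress is 0%" -> "completed successfully" lines in the log.
        # get_task_info is an assumed stand-in: it returns an object with
        # .state ('running' | 'success' | 'error'), .progress and .error.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return info  # the log then records duration_secs
            if info.state == 'error':
                raise RuntimeError(info.error)  # oslo.vmware translates this fault
            print(f"Task: {task_ref} progress is {info.progress}%.")
            time.sleep(poll_interval)
        raise TimeoutError(f"Task {task_ref} did not complete within {timeout}s")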
[ 1346.856873] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6383ae39-b0a0-4eb4-a148-2f584691a9d1 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1346.858762] env[67015]: DEBUG nova.compute.claims [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1346.858953] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1346.859189] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1346.882417] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1346.937144] env[67015]: DEBUG oslo_vmware.rw_handles [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e14fa0e6-38b8-42f7-aea2-a2ff14625dcd/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1346.997181] env[67015]: DEBUG oslo_vmware.rw_handles [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1346.997377] env[67015]: DEBUG oslo_vmware.rw_handles [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e14fa0e6-38b8-42f7-aea2-a2ff14625dcd/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1347.184965] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b051873f-280c-4a4b-979a-f1c2b4f30d49 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1347.197960] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b710a17-1cf1-4ebe-b196-c493cf5299b5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1347.229478] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd8a16d-0acd-4c20-a213-2da14fb5a707 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1347.236580] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3432bcb5-795b-45eb-b418-6f99d0d69694 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1347.249952] env[67015]: DEBUG nova.compute.provider_tree [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1347.260022] env[67015]: DEBUG nova.scheduler.client.report [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1347.272215] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.413s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1347.272759] env[67015]: ERROR nova.compute.manager [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1347.272759] env[67015]: Faults: ['InvalidArgument']
[ 1347.272759] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Traceback (most recent call last):
[ 1347.272759] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1347.272759] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] self.driver.spawn(context, instance, image_meta,
[ 1347.272759] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1347.272759] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1347.272759] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1347.272759] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] self._fetch_image_if_missing(context, vi)
[ 1347.272759] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1347.272759] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] image_cache(vi, tmp_image_ds_loc)
[ 1347.272759] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1347.273055] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] vm_util.copy_virtual_disk(
[ 1347.273055] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1347.273055] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] session._wait_for_task(vmdk_copy_task)
[ 1347.273055] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1347.273055] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] return self.wait_for_task(task_ref)
[ 1347.273055] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1347.273055] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] return evt.wait()
[ 1347.273055] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1347.273055] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] result = hub.switch()
[ 1347.273055] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1347.273055] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] return self.greenlet.switch()
[ 1347.273055] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1347.273055] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] self.f(*self.args, **self.kw)
[ 1347.273335] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1347.273335] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] raise exceptions.translate_fault(task_info.error)
[ 1347.273335] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1347.273335] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Faults: ['InvalidArgument']
[ 1347.273335] env[67015]: ERROR nova.compute.manager [instance: 30f91210-0318-4912-808b-843c2cd04ea1]
[ 1347.273472] env[67015]: DEBUG nova.compute.utils [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1347.274859] env[67015]: DEBUG nova.compute.manager [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Build of instance 30f91210-0318-4912-808b-843c2cd04ea1 was re-scheduled: A specified parameter was not correct: fileType
[ 1347.274859] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1347.275243] env[67015]: DEBUG nova.compute.manager [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1347.275415] env[67015]: DEBUG nova.compute.manager [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 1347.275586] env[67015]: DEBUG nova.compute.manager [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1347.275748] env[67015]: DEBUG nova.network.neutron [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1347.636523] env[67015]: DEBUG nova.network.neutron [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1347.646642] env[67015]: INFO nova.compute.manager [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Took 0.37 seconds to deallocate network for instance.
[ 1347.743486] env[67015]: INFO nova.scheduler.client.report [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Deleted allocations for instance 30f91210-0318-4912-808b-843c2cd04ea1
[ 1347.763580] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9975ebc8-247c-46cf-8d6d-bc73aa9ed14c tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Lock "30f91210-0318-4912-808b-843c2cd04ea1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 619.585s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1347.765044] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b9f28a75-baa6-4bfe-8b64-573e08410db2 tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Lock "30f91210-0318-4912-808b-843c2cd04ea1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 423.551s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1347.765273] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b9f28a75-baa6-4bfe-8b64-573e08410db2 tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Acquiring lock "30f91210-0318-4912-808b-843c2cd04ea1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1347.765477] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b9f28a75-baa6-4bfe-8b64-573e08410db2 tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Lock "30f91210-0318-4912-808b-843c2cd04ea1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1347.765641] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b9f28a75-baa6-4bfe-8b64-573e08410db2 tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Lock "30f91210-0318-4912-808b-843c2cd04ea1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1347.767839] env[67015]: INFO nova.compute.manager [None req-b9f28a75-baa6-4bfe-8b64-573e08410db2 tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Terminating instance
[ 1347.769597] env[67015]: DEBUG nova.compute.manager [None req-b9f28a75-baa6-4bfe-8b64-573e08410db2 tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1347.769832] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f28a75-baa6-4bfe-8b64-573e08410db2 tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1347.770348] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-70fa67d4-916c-4391-95f0-8900d42c8364 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1347.780340] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ffe2c83-4f3c-4e6d-8223-27822a441001 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1347.792598] env[67015]: DEBUG nova.compute.manager [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1347.815093] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-b9f28a75-baa6-4bfe-8b64-573e08410db2 tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 30f91210-0318-4912-808b-843c2cd04ea1 could not be found.
[ 1347.815313] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f28a75-baa6-4bfe-8b64-573e08410db2 tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1347.815497] env[67015]: INFO nova.compute.manager [None req-b9f28a75-baa6-4bfe-8b64-573e08410db2 tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Took 0.05 seconds to destroy the instance on the hypervisor.
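The traceback above shows the fault's path to the surface: _poll_task hands the task error to exceptions.translate_fault, and the resulting VimFaultException ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']) propagates up through copy_virtual_disk and spawn until _do_build_and_run_instance re-schedules the build. A rough sketch of that name-to-exception translation, using stand-in classes (the real mapping lives in oslo_vmware.exceptions and operates on fault objects, not bare strings):

    class VimFault(Exception):
        # Generic stand-in for oslo_vmware.exceptions.VimFaultException.
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    class InvalidArgumentFault(VimFault):
        pass

    # Assumed mapping from fault name to exception class (illustrative subset).
    _FAULT_MAP = {'InvalidArgument': InvalidArgumentFault}

    def translate_fault(fault_name, message):
        # Turn a vCenter task error into a typed Python exception.
        cls = _FAULT_MAP.get(fault_name, VimFault)
        return cls([fault_name], message)

    # The task error behind the log above would translate roughly as:
    # raise translate_fault('InvalidArgument',
    #                       'A specified parameter was not correct: fileType')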
[ 1347.815784] env[67015]: DEBUG oslo.service.loopingcall [None req-b9f28a75-baa6-4bfe-8b64-573e08410db2 tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1347.816201] env[67015]: DEBUG nova.compute.manager [-] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1347.816201] env[67015]: DEBUG nova.network.neutron [-] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1347.842390] env[67015]: DEBUG nova.network.neutron [-] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1347.844036] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1347.844359] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1347.845781] env[67015]: INFO nova.compute.claims [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1347.851392] env[67015]: INFO nova.compute.manager [-] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] Took 0.03 seconds to deallocate network for instance.
[ 1347.942996] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b9f28a75-baa6-4bfe-8b64-573e08410db2 tempest-ServersTestManualDisk-1446184680 tempest-ServersTestManualDisk-1446184680-project-member] Lock "30f91210-0318-4912-808b-843c2cd04ea1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.178s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1347.944278] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "30f91210-0318-4912-808b-843c2cd04ea1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 177.129s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1347.944278] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 30f91210-0318-4912-808b-843c2cd04ea1] During sync_power_state the instance has a pending task (deleting). Skip.
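The lockutils lines here and throughout follow one fixed pattern: "Acquiring lock", then "acquired ... :: waited Ns", then '"released" ... :: held Ns'; the 619.585s and 423.551s figures above are exactly that wait/hold bookkeeping. A small threading-based sketch that mimics it (plain Python, not the oslo_concurrency implementation; names are illustrative):

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}
    _registry_guard = threading.Lock()

    @contextmanager
    def timed_lock(name, caller):
        # Reproduce the Acquiring / acquired :: waited / "released" :: held
        # log pattern around a per-name threading.Lock.
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        print(f'Acquiring lock "{name}" by "{caller}"')
        start = time.monotonic()
        lock.acquire()
        print(f'Lock "{name}" acquired by "{caller}" :: waited '
              f'{time.monotonic() - start:.3f}s')
        held_from = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            print(f'Lock "{name}" "released" by "{caller}" :: held '
                  f'{time.monotonic() - held_from:.3f}s')

    # Usage in the spirit of the log above:
    # with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
    #     ...claim resources...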
[ 1347.944278] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "30f91210-0318-4912-808b-843c2cd04ea1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1348.111475] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f46013-40e3-443c-8297-0563d7193088 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1348.118870] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82f12f87-03db-4cf7-84d7-8f49e0d04683 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1348.149134] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9548f684-d443-4e54-acb5-e27d6bb38267 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1348.156277] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d20b546d-b59d-40ff-86ce-47570065694c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1348.170011] env[67015]: DEBUG nova.compute.provider_tree [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1348.178971] env[67015]: DEBUG nova.scheduler.client.report [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1348.195236] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.351s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1348.196021] env[67015]: DEBUG nova.compute.manager [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 1348.233596] env[67015]: DEBUG nova.compute.utils [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1348.234854] env[67015]: DEBUG nova.compute.manager [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 1348.235029] env[67015]: DEBUG nova.network.neutron [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1348.243320] env[67015]: DEBUG nova.compute.manager [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 1348.313635] env[67015]: DEBUG nova.compute.manager [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Start spawning the instance on the hypervisor. {{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
[ 1348.326917] env[67015]: DEBUG nova.policy [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0b9c7e4a75ad40e4bebb325e92368b69', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fb929cfdc6f047fe9cb26a1527ff9d94', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1348.343569] env[67015]: DEBUG nova.virt.hardware [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1348.343814] env[67015]: DEBUG nova.virt.hardware [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1348.343976] env[67015]: DEBUG nova.virt.hardware [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1348.344184] env[67015]: DEBUG nova.virt.hardware [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1348.344337] env[67015]: DEBUG nova.virt.hardware [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1348.344489] env[67015]: DEBUG nova.virt.hardware [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1348.344701] env[67015]: DEBUG nova.virt.hardware [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1348.344864] env[67015]: DEBUG nova.virt.hardware [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1348.345061] env[67015]: DEBUG nova.virt.hardware [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1348.345243] env[67015]: DEBUG nova.virt.hardware [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1348.345419] env[67015]: DEBUG nova.virt.hardware [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1348.346318] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1695d04-5a52-4326-b497-f2ef2480e611 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1348.354515] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c94a2ef8-7507-4ffc-9ed5-6a8d4fbbe5ab {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1348.859365] env[67015]: DEBUG nova.network.neutron [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Successfully created port: 75018c11-aa90-4c64-a393-593a8f540341 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1349.819922] env[67015]: DEBUG nova.compute.manager [req-56cbb226-e3b8-4aa2-b541-55e3653add86 req-8e60d8f0-ffc2-4d92-8b1b-7adfb67f9cf6 service nova] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Received event network-vif-plugged-75018c11-aa90-4c64-a393-593a8f540341 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1349.820375] env[67015]: DEBUG oslo_concurrency.lockutils [req-56cbb226-e3b8-4aa2-b541-55e3653add86 req-8e60d8f0-ffc2-4d92-8b1b-7adfb67f9cf6 service nova] Acquiring lock "1ef0af78-e94a-44f0-8b4c-adb1ab733ed8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1349.821698] env[67015]: DEBUG oslo_concurrency.lockutils [req-56cbb226-e3b8-4aa2-b541-55e3653add86 req-8e60d8f0-ffc2-4d92-8b1b-7adfb67f9cf6 service nova] Lock "1ef0af78-e94a-44f0-8b4c-adb1ab733ed8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1349.821698] env[67015]: DEBUG oslo_concurrency.lockutils [req-56cbb226-e3b8-4aa2-b541-55e3653add86 req-8e60d8f0-ffc2-4d92-8b1b-7adfb67f9cf6 service nova] Lock "1ef0af78-e94a-44f0-8b4c-adb1ab733ed8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1349.821698] env[67015]: DEBUG nova.compute.manager [req-56cbb226-e3b8-4aa2-b541-55e3653add86 req-8e60d8f0-ffc2-4d92-8b1b-7adfb67f9cf6 service nova] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] No waiting events found dispatching network-vif-plugged-75018c11-aa90-4c64-a393-593a8f540341 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1349.821698] env[67015]: WARNING nova.compute.manager [req-56cbb226-e3b8-4aa2-b541-55e3653add86 req-8e60d8f0-ffc2-4d92-8b1b-7adfb67f9cf6 service nova] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Received unexpected event network-vif-plugged-75018c11-aa90-4c64-a393-593a8f540341 for instance with vm_state building and task_state spawning.
[ 1349.835791] env[67015]: DEBUG nova.network.neutron [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Successfully updated port: 75018c11-aa90-4c64-a393-593a8f540341 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1349.849226] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Acquiring lock "refresh_cache-1ef0af78-e94a-44f0-8b4c-adb1ab733ed8" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1349.849226] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Acquired lock "refresh_cache-1ef0af78-e94a-44f0-8b4c-adb1ab733ed8" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1349.849226] env[67015]: DEBUG nova.network.neutron [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1349.894176] env[67015]: DEBUG nova.network.neutron [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1350.067603] env[67015]: DEBUG nova.network.neutron [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Updating instance_info_cache with network_info: [{"id": "75018c11-aa90-4c64-a393-593a8f540341", "address": "fa:16:3e:6f:a6:93", "network": {"id": "f76f1eb0-e589-4f30-9c3a-82dae74ca911", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-694386252-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb929cfdc6f047fe9cb26a1527ff9d94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab0428e-1be7-475e-80e3-1f0aa08d4f86", "external-id": "nsx-vlan-transportzone-976", "segmentation_id": 976, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75018c11-aa", "ovs_interfaceid": "75018c11-aa90-4c64-a393-593a8f540341", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1350.081572] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Releasing lock "refresh_cache-1ef0af78-e94a-44f0-8b4c-adb1ab733ed8" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1350.081951] env[67015]: DEBUG nova.compute.manager [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Instance network_info: |[{"id": "75018c11-aa90-4c64-a393-593a8f540341", "address": "fa:16:3e:6f:a6:93", "network": {"id": "f76f1eb0-e589-4f30-9c3a-82dae74ca911", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-694386252-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb929cfdc6f047fe9cb26a1527ff9d94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab0428e-1be7-475e-80e3-1f0aa08d4f86", "external-id": "nsx-vlan-transportzone-976", "segmentation_id": 976, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75018c11-aa", "ovs_interfaceid": "75018c11-aa90-4c64-a393-593a8f540341", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 1350.082670] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:a6:93', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ab0428e-1be7-475e-80e3-1f0aa08d4f86', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '75018c11-aa90-4c64-a393-593a8f540341', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1350.090241] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Creating folder: Project (fb929cfdc6f047fe9cb26a1527ff9d94). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1350.090731] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b75ff215-7513-4506-b808-1e8c01fe358a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1350.100791] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Created folder: Project (fb929cfdc6f047fe9cb26a1527ff9d94) in parent group-v623108.
[ 1350.100969] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Creating folder: Instances. Parent ref: group-v623191. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1350.101192] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9c2fb303-4273-4629-87b6-95b5309cc8c5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1350.109832] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Created folder: Instances in parent group-v623191.
[ 1350.110080] env[67015]: DEBUG oslo.service.loopingcall [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1350.110264] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1350.110447] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3874d368-f435-4b41-9b71-69d2152a17c2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1350.129574] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1350.129574] env[67015]: value = "task-3114502"
[ 1350.129574] env[67015]: _type = "Task"
[ 1350.129574] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1350.136379] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114502, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1350.639796] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114502, 'name': CreateVM_Task, 'duration_secs': 0.275348} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1350.640041] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1350.640771] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1350.640982] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1350.641338] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1350.641579] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2997c87-17e8-400e-b79e-194a49171253 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1350.645850] env[67015]: DEBUG oslo_vmware.api [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Waiting for the task: (returnval){
[ 1350.645850] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]5208a1f3-94dc-c115-82e7-cc0646ec5861"
[ 1350.645850] env[67015]: _type = "Task"
[ 1350.645850] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1350.653414] env[67015]: DEBUG oslo_vmware.api [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]5208a1f3-94dc-c115-82e7-cc0646ec5861, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1351.155897] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1351.156283] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1351.156359] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1351.874686] env[67015]: DEBUG nova.compute.manager [req-61db0db0-2303-48b1-b903-0f1fa6d83f3a req-25b7a538-c7df-48e7-8985-9c954e3eb3b4 service nova] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Received event network-changed-75018c11-aa90-4c64-a393-593a8f540341 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1351.874885] env[67015]: DEBUG nova.compute.manager [req-61db0db0-2303-48b1-b903-0f1fa6d83f3a req-25b7a538-c7df-48e7-8985-9c954e3eb3b4 service nova] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Refreshing instance network info cache due to event network-changed-75018c11-aa90-4c64-a393-593a8f540341. {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 1351.875123] env[67015]: DEBUG oslo_concurrency.lockutils [req-61db0db0-2303-48b1-b903-0f1fa6d83f3a req-25b7a538-c7df-48e7-8985-9c954e3eb3b4 service nova] Acquiring lock "refresh_cache-1ef0af78-e94a-44f0-8b4c-adb1ab733ed8" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1351.875329] env[67015]: DEBUG oslo_concurrency.lockutils [req-61db0db0-2303-48b1-b903-0f1fa6d83f3a req-25b7a538-c7df-48e7-8985-9c954e3eb3b4 service nova] Acquired lock "refresh_cache-1ef0af78-e94a-44f0-8b4c-adb1ab733ed8" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1351.875427] env[67015]: DEBUG nova.network.neutron [req-61db0db0-2303-48b1-b903-0f1fa6d83f3a req-25b7a538-c7df-48e7-8985-9c954e3eb3b4 service nova] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Refreshing network info cache for port 75018c11-aa90-4c64-a393-593a8f540341 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1352.209390] env[67015]: DEBUG nova.network.neutron [req-61db0db0-2303-48b1-b903-0f1fa6d83f3a req-25b7a538-c7df-48e7-8985-9c954e3eb3b4 service nova] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Updated VIF entry in instance network info cache for port 75018c11-aa90-4c64-a393-593a8f540341. {{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1352.209739] env[67015]: DEBUG nova.network.neutron [req-61db0db0-2303-48b1-b903-0f1fa6d83f3a req-25b7a538-c7df-48e7-8985-9c954e3eb3b4 service nova] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Updating instance_info_cache with network_info: [{"id": "75018c11-aa90-4c64-a393-593a8f540341", "address": "fa:16:3e:6f:a6:93", "network": {"id": "f76f1eb0-e589-4f30-9c3a-82dae74ca911", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-694386252-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb929cfdc6f047fe9cb26a1527ff9d94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab0428e-1be7-475e-80e3-1f0aa08d4f86", "external-id": "nsx-vlan-transportzone-976", "segmentation_id": 976, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75018c11-aa", "ovs_interfaceid": "75018c11-aa90-4c64-a393-593a8f540341", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1352.219264] env[67015]: DEBUG oslo_concurrency.lockutils [req-61db0db0-2303-48b1-b903-0f1fa6d83f3a req-25b7a538-c7df-48e7-8985-9c954e3eb3b4 service nova] Releasing lock "refresh_cache-1ef0af78-e94a-44f0-8b4c-adb1ab733ed8" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1358.951969] env[67015]: DEBUG oslo_concurrency.lockutils [None req-96d57344-1051-4a13-9b61-0c859f222015 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquiring lock "c7d8b42d-6455-4489-9f62-8ab9f85e7f76" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1361.589832] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Acquiring lock "8f61fa9e-82b6-401a-a7e0-fe4a54561f8d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1361.590142] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Lock "8f61fa9e-82b6-401a-a7e0-fe4a54561f8d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1366.514333] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1367.509198] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1368.514979] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1368.514979] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1371.514314] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1371.514660] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1371.527176] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1371.527394] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s
{{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1371.527564] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1371.527720] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1371.528846] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b545b717-2b58-433d-96c5-f2bcdbd5ab6e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.537630] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f633f1-2b8a-401c-abc3-05cd9f9472e7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.551288] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb2c7ae-8e58-4118-b042-08b6b339a4f4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.557212] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01fad28b-46c9-45e0-91aa-1e0102696509 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.584875] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181060MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1371.585054] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1371.585260] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1371.662491] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 843278e1-7d76-4f50-8170-9e335d29326e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.662655] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance db3de804-63b7-4887-b752-282e70e0f20e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.662786] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8c919afe-37b6-47f0-b939-d9df5800d7ee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.662911] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 437b21d5-932d-4216-b7f7-17c6eab2665f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.663045] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.663173] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 7b744243-c7e5-4253-9273-9d7f84772d96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.663294] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 98f18180-bd1c-492d-9fbe-4bf306aca4b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.663410] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 7a5c2a57-b28d-45e0-ab7b-5a649758b69b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.663526] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance c7d8b42d-6455-4489-9f62-8ab9f85e7f76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.663638] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.673799] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance daa757b3-3ad6-477f-a7e9-b81a863e9a8c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1371.684563] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 01c3904e-b014-4f3a-8647-ba5fb786d960 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1371.695074] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6d104e2f-9924-4094-823d-a78c21acfc7b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1371.709244] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 55346ba0-b93e-489f-8b89-640b7e33e384 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1371.718876] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 944728f0-7db6-4cca-a51c-7acb5998cb12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1371.728551] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance b7b12357-09c4-402f-bf1c-f8872d86d17b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1371.737594] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 41f47735-f679-4b30-8e30-f917dcf4db42 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1371.746072] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1371.746293] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1371.746456] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1371.949179] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-083003f8-0a9f-4dd5-9d48-c25168e77058 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.956959] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0509c775-89b1-4059-86d2-12e124f1ccb2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.986673] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc27ff0f-a280-4783-996c-3a2797907116 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.993834] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-342f074a-2b7e-499c-93cc-45dd9cec5595 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.006749] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1372.016238] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1372.030307] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1372.030514] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.445s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1373.026066] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1373.049040] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1373.049279] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1373.049460] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1373.318407] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e2a5392e-bcfd-49f7-b194-7c8a25d488db tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Acquiring lock "1ef0af78-e94a-44f0-8b4c-adb1ab733ed8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1376.514220] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1376.514539] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1376.514539] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1376.534047] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Skipping network cache update for instance because it is Building. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1376.534047] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1376.534181] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1376.534270] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1376.534391] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1376.534511] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1376.534661] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1376.534745] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1376.534861] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1376.534979] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1376.535114] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1384.117549] env[67015]: DEBUG oslo_concurrency.lockutils [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Acquiring lock "199b0508-5b88-41b4-ae08-dcdabb656686" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1384.117815] env[67015]: DEBUG oslo_concurrency.lockutils [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Lock "199b0508-5b88-41b4-ae08-dcdabb656686" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1391.083935] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Acquiring lock "f9cb0b88-053a-4a6d-be59-5aa202b6a4f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1391.084309] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Lock "f9cb0b88-053a-4a6d-be59-5aa202b6a4f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.436392] env[67015]: WARNING oslo_vmware.rw_handles [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1394.436392] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1394.436392] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1394.436392] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1394.436392] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1394.436392] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 1394.436392] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1394.436392] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1394.436392] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1394.436392] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1394.436392] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1394.436392] env[67015]: ERROR oslo_vmware.rw_handles [ 1394.436946] env[67015]: DEBUG 
nova.virt.vmwareapi.images [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/e14fa0e6-38b8-42f7-aea2-a2ff14625dcd/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1394.439045] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1394.439315] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Copying Virtual Disk [datastore2] vmware_temp/e14fa0e6-38b8-42f7-aea2-a2ff14625dcd/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/e14fa0e6-38b8-42f7-aea2-a2ff14625dcd/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1394.439612] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9551942f-5113-456d-bb38-b6c1b1a23131 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.448123] env[67015]: DEBUG oslo_vmware.api [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Waiting for the task: (returnval){ [ 1394.448123] env[67015]: value = "task-3114503" [ 1394.448123] env[67015]: _type = "Task" [ 1394.448123] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.456331] env[67015]: DEBUG oslo_vmware.api [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Task: {'id': task-3114503, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.958961] env[67015]: DEBUG oslo_vmware.exceptions [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Fault InvalidArgument not matched. 
{{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1394.959347] env[67015]: DEBUG oslo_concurrency.lockutils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1394.959952] env[67015]: ERROR nova.compute.manager [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1394.959952] env[67015]: Faults: ['InvalidArgument'] [ 1394.959952] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Traceback (most recent call last): [ 1394.959952] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1394.959952] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] yield resources [ 1394.959952] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1394.959952] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] self.driver.spawn(context, instance, image_meta, [ 1394.959952] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1394.959952] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1394.959952] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1394.959952] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] self._fetch_image_if_missing(context, vi) [ 1394.959952] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1394.960270] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] image_cache(vi, tmp_image_ds_loc) [ 1394.960270] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1394.960270] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] vm_util.copy_virtual_disk( [ 1394.960270] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1394.960270] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] session._wait_for_task(vmdk_copy_task) [ 1394.960270] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1394.960270] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] return self.wait_for_task(task_ref) [ 1394.960270] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1394.960270] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] return evt.wait() [ 1394.960270] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1394.960270] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] result = hub.switch() [ 1394.960270] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1394.960270] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] return self.greenlet.switch() [ 1394.960610] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1394.960610] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] self.f(*self.args, **self.kw) [ 1394.960610] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1394.960610] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] raise exceptions.translate_fault(task_info.error) [ 1394.960610] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1394.960610] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Faults: ['InvalidArgument'] [ 1394.960610] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] [ 1394.960610] env[67015]: INFO nova.compute.manager [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Terminating instance [ 1394.961954] env[67015]: DEBUG oslo_concurrency.lockutils [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.962164] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1394.962576] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-c3d54f31-4692-41e2-b3eb-830ef7340d8a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.964474] env[67015]: DEBUG nova.compute.manager [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1394.964667] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1394.965374] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d610fca-89ee-4796-b259-496141190002 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.971963] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1394.972183] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cca8883b-5d96-41af-ad31-441de7692fc5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.974339] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1394.974510] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1394.975486] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34ec18cf-86da-49fd-8b0f-1484a77ed945 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.980862] env[67015]: DEBUG oslo_vmware.api [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Waiting for the task: (returnval){ [ 1394.980862] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52ea3107-823c-c7e9-9f6c-4f1adc05c002" [ 1394.980862] env[67015]: _type = "Task" [ 1394.980862] env[67015]: } to complete. 
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.987934] env[67015]: DEBUG oslo_vmware.api [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52ea3107-823c-c7e9-9f6c-4f1adc05c002, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.038665] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1395.038883] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1395.039081] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Deleting the datastore file [datastore2] 843278e1-7d76-4f50-8170-9e335d29326e {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1395.039423] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7379a28b-6a73-4f51-8e02-4378ae4a681b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.045171] env[67015]: DEBUG oslo_vmware.api [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Waiting for the task: (returnval){ [ 1395.045171] env[67015]: value = "task-3114505" [ 1395.045171] env[67015]: _type = "Task" [ 1395.045171] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.052910] env[67015]: DEBUG oslo_vmware.api [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Task: {'id': task-3114505, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.492926] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1395.493266] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Creating directory with path [datastore2] vmware_temp/7619dd1d-a5a9-49e9-b5c8-8e7617ef571d/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1395.493430] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7375b94-0a72-4159-a44d-d3f720c188bb {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.505185] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Created directory with path [datastore2] vmware_temp/7619dd1d-a5a9-49e9-b5c8-8e7617ef571d/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1395.505319] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Fetch image to [datastore2] vmware_temp/7619dd1d-a5a9-49e9-b5c8-8e7617ef571d/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1395.505422] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/7619dd1d-a5a9-49e9-b5c8-8e7617ef571d/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1395.506197] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-373befed-c74d-4603-8597-1acf5b837bf7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.512940] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6724eba4-0f9d-4494-b440-6eadf398de1f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.521715] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e0dab5-c9f5-498f-a531-31a5ea9ed728 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.555859] env[67015]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da3a7d4-3ba3-4480-8a0c-1734944200a6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.562704] env[67015]: DEBUG oslo_vmware.api [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Task: {'id': task-3114505, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083133} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.564138] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1395.564338] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1395.564512] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1395.564683] env[67015]: INFO nova.compute.manager [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Took 0.60 seconds to destroy the instance on the hypervisor. 
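The destroy sequence above (task-3114505 polled at "progress is 0%", then "completed successfully" with a duration_secs) follows the same task-polling contract that wait_for_task/_poll_task log throughout this trace: poll the vCenter task object until it reaches a terminal state, and translate an 'error' terminal state into a Python exception carrying the fault names. A minimal sketch of that pattern, assuming a hypothetical get_task_info callable returning (state, progress, error, faults); this is not oslo.vmware's actual interface, only an illustration of the behavior visible in the log:

import time


class VimFaultException(Exception):
    """Terminal 'error' state of a vCenter task, carrying its fault names."""

    def __init__(self, message, fault_list):
        super().__init__(message)
        self.fault_list = fault_list  # e.g. ['InvalidArgument'], as in the log


def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a task until it reaches a terminal state.

    get_task_info is an assumed callable returning a
    (state, progress, error, faults) tuple with state in
    {'running', 'success', 'error'}.
    """
    while True:
        state, progress, error, faults = get_task_info()
        if state == "success":
            return progress
        if state == "error":
            # An 'error' result is translated into an exception; this is how
            # "A specified parameter was not correct: fileType" surfaced to
            # nova.compute.manager in the entries above.
            raise VimFaultException(error, faults)
        time.sleep(poll_interval)  # still running; poll again

The polling interval here is a placeholder; the log's roughly half-second gap between the 0% poll and the completion poll for task-3114505 suggests a comparable cadence, but the real value is configuration-dependent.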
[ 1395.566488] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5a94dbf7-7309-488b-a307-a16111ae02a4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.568346] env[67015]: DEBUG nova.compute.claims [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1395.568527] env[67015]: DEBUG oslo_concurrency.lockutils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.568739] env[67015]: DEBUG oslo_concurrency.lockutils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1395.593314] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1395.651071] env[67015]: DEBUG oslo_vmware.rw_handles [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7619dd1d-a5a9-49e9-b5c8-8e7617ef571d/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1395.713498] env[67015]: DEBUG oslo_vmware.rw_handles [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1395.713691] env[67015]: DEBUG oslo_vmware.rw_handles [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7619dd1d-a5a9-49e9-b5c8-8e7617ef571d/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1395.876035] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cf5296c-275c-44a3-892a-b109bed4f242 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.883415] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e9a4eb7-8fcd-44e6-9332-8e3333db2f8c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.913566] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba913c3-c9d9-40ba-aa74-1cb8d88844f6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.920157] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96024411-156d-4f5b-95fa-dd2efd2a3e3d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.932660] env[67015]: DEBUG nova.compute.provider_tree [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1395.941295] env[67015]: DEBUG nova.scheduler.client.report [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1395.975211] env[67015]: DEBUG oslo_concurrency.lockutils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.406s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1395.975769] env[67015]: ERROR nova.compute.manager [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1395.975769] env[67015]: Faults: ['InvalidArgument'] [ 1395.975769] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Traceback (most recent call last): [ 1395.975769] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 1395.975769] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] self.driver.spawn(context, instance, image_meta, [ 1395.975769] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1395.975769] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1395.975769] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1395.975769] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] self._fetch_image_if_missing(context, vi) [ 1395.975769] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1395.975769] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] image_cache(vi, tmp_image_ds_loc) [ 1395.975769] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1395.976178] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] vm_util.copy_virtual_disk( [ 1395.976178] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1395.976178] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] session._wait_for_task(vmdk_copy_task) [ 1395.976178] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1395.976178] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] return self.wait_for_task(task_ref) [ 1395.976178] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1395.976178] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] return evt.wait() [ 1395.976178] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1395.976178] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] result = hub.switch() [ 1395.976178] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1395.976178] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] return self.greenlet.switch() [ 1395.976178] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1395.976178] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] self.f(*self.args, **self.kw) [ 1395.976547] env[67015]: ERROR nova.compute.manager [instance: 
843278e1-7d76-4f50-8170-9e335d29326e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1395.976547] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] raise exceptions.translate_fault(task_info.error) [ 1395.976547] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1395.976547] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Faults: ['InvalidArgument'] [ 1395.976547] env[67015]: ERROR nova.compute.manager [instance: 843278e1-7d76-4f50-8170-9e335d29326e] [ 1395.976677] env[67015]: DEBUG nova.compute.utils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1395.978013] env[67015]: DEBUG nova.compute.manager [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Build of instance 843278e1-7d76-4f50-8170-9e335d29326e was re-scheduled: A specified parameter was not correct: fileType [ 1395.978013] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1395.978517] env[67015]: DEBUG nova.compute.manager [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1395.978726] env[67015]: DEBUG nova.compute.manager [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1395.978921] env[67015]: DEBUG nova.compute.manager [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1395.979220] env[67015]: DEBUG nova.network.neutron [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1396.516236] env[67015]: DEBUG nova.network.neutron [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1396.530022] env[67015]: INFO nova.compute.manager [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Took 0.55 seconds to deallocate network for instance. [ 1396.624141] env[67015]: INFO nova.scheduler.client.report [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Deleted allocations for instance 843278e1-7d76-4f50-8170-9e335d29326e [ 1396.646983] env[67015]: DEBUG oslo_concurrency.lockutils [None req-83c42f16-d7a9-404d-b1b6-fa8250062170 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Lock "843278e1-7d76-4f50-8170-9e335d29326e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 622.839s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.646983] env[67015]: DEBUG oslo_concurrency.lockutils [None req-81c6f8a7-e967-42f1-bf29-0ff6e96b0a28 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Lock "843278e1-7d76-4f50-8170-9e335d29326e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 423.969s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.646983] env[67015]: DEBUG oslo_concurrency.lockutils [None req-81c6f8a7-e967-42f1-bf29-0ff6e96b0a28 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Acquiring lock "843278e1-7d76-4f50-8170-9e335d29326e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.647168] env[67015]: DEBUG oslo_concurrency.lockutils [None req-81c6f8a7-e967-42f1-bf29-0ff6e96b0a28 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Lock "843278e1-7d76-4f50-8170-9e335d29326e-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.647168] env[67015]: DEBUG oslo_concurrency.lockutils [None req-81c6f8a7-e967-42f1-bf29-0ff6e96b0a28 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Lock "843278e1-7d76-4f50-8170-9e335d29326e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.649416] env[67015]: INFO nova.compute.manager [None req-81c6f8a7-e967-42f1-bf29-0ff6e96b0a28 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Terminating instance [ 1396.651542] env[67015]: DEBUG nova.compute.manager [None req-81c6f8a7-e967-42f1-bf29-0ff6e96b0a28 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1396.652376] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-81c6f8a7-e967-42f1-bf29-0ff6e96b0a28 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1396.652376] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aef11469-0a2b-4dfd-8f60-7b9f31dbed2d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.663991] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6825eea5-c8b2-486d-a7f2-1eae9d0495eb {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.677018] env[67015]: DEBUG nova.compute.manager [None req-c0de6377-9867-460e-8fa8-d1a6de030cb0 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: daa757b3-3ad6-477f-a7e9-b81a863e9a8c] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1396.701523] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-81c6f8a7-e967-42f1-bf29-0ff6e96b0a28 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 843278e1-7d76-4f50-8170-9e335d29326e could not be found. 
[ 1396.701523] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-81c6f8a7-e967-42f1-bf29-0ff6e96b0a28 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1396.701523] env[67015]: INFO nova.compute.manager [None req-81c6f8a7-e967-42f1-bf29-0ff6e96b0a28 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1396.701523] env[67015]: DEBUG oslo.service.loopingcall [None req-81c6f8a7-e967-42f1-bf29-0ff6e96b0a28 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1396.701523] env[67015]: DEBUG nova.compute.manager [-] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1396.701712] env[67015]: DEBUG nova.network.neutron [-] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1396.705697] env[67015]: DEBUG nova.compute.manager [None req-c0de6377-9867-460e-8fa8-d1a6de030cb0 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: daa757b3-3ad6-477f-a7e9-b81a863e9a8c] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1396.731860] env[67015]: DEBUG nova.network.neutron [-] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1396.742183] env[67015]: INFO nova.compute.manager [-] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] Took 0.04 seconds to deallocate network for instance. [ 1396.757148] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c0de6377-9867-460e-8fa8-d1a6de030cb0 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Lock "daa757b3-3ad6-477f-a7e9-b81a863e9a8c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 209.628s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.765642] env[67015]: DEBUG nova.compute.manager [None req-39fdf1ce-72f4-4618-8dc5-a8f91ca7b493 tempest-ServerAddressesTestJSON-1912094255 tempest-ServerAddressesTestJSON-1912094255-project-member] [instance: 01c3904e-b014-4f3a-8647-ba5fb786d960] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1396.792936] env[67015]: DEBUG nova.compute.manager [None req-39fdf1ce-72f4-4618-8dc5-a8f91ca7b493 tempest-ServerAddressesTestJSON-1912094255 tempest-ServerAddressesTestJSON-1912094255-project-member] [instance: 01c3904e-b014-4f3a-8647-ba5fb786d960] Instance disappeared before build. 
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1396.813764] env[67015]: DEBUG oslo_concurrency.lockutils [None req-39fdf1ce-72f4-4618-8dc5-a8f91ca7b493 tempest-ServerAddressesTestJSON-1912094255 tempest-ServerAddressesTestJSON-1912094255-project-member] Lock "01c3904e-b014-4f3a-8647-ba5fb786d960" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.152s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.821907] env[67015]: DEBUG nova.compute.manager [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1396.856934] env[67015]: DEBUG oslo_concurrency.lockutils [None req-81c6f8a7-e967-42f1-bf29-0ff6e96b0a28 tempest-ListServersNegativeTestJSON-1616117717 tempest-ListServersNegativeTestJSON-1616117717-project-member] Lock "843278e1-7d76-4f50-8170-9e335d29326e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.210s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.857768] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "843278e1-7d76-4f50-8170-9e335d29326e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 226.043s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.857954] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 843278e1-7d76-4f50-8170-9e335d29326e] During sync_power_state the instance has a pending task (deleting). Skip. 
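
The lock lines above tell the contention story for instance 843278e1: _locked_do_build_and_run_instance held the per-instance lock for 622.839s, do_terminate_instance waited 423.969s for it, and the power-state sync waited 226.043s and then skipped because the instance already carried a deleting task. All of that serialization comes from oslo.concurrency's named locks; the "acquired ... waited" / "released ... held" lines are the DEBUG output of the inner() wrapper in lockutils.py cited in each record. A small runnable sketch of the same pattern, assuming oslo.concurrency is installed (pip install oslo.concurrency):

    import logging
    import threading
    import time

    from oslo_concurrency import lockutils

    # lockutils logs its waited/held accounting at DEBUG level.
    logging.basicConfig(level=logging.DEBUG)

    UUID = "843278e1-7d76-4f50-8170-9e335d29326e"  # instance lock name from the log

    @lockutils.synchronized(UUID)
    def build_instance():
        time.sleep(1.0)       # stands in for the 622s build that held the lock

    @lockutils.synchronized(UUID)
    def terminate_instance():
        print("terminating")  # runs only once the build lock is released

    # Terminate is requested while the build still holds the lock, so it
    # blocks, just like the 423.969s wait recorded at 1396.646983.
    builder = threading.Thread(target=build_instance)
    builder.start()
    time.sleep(0.1)
    terminate_instance()
    builder.join()
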
[ 1396.858145] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "843278e1-7d76-4f50-8170-9e335d29326e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.877477] env[67015]: DEBUG oslo_concurrency.lockutils [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.877713] env[67015]: DEBUG oslo_concurrency.lockutils [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.879171] env[67015]: INFO nova.compute.claims [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1397.109779] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac03dbae-2065-4c4a-86f6-6722c80ef386 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.117666] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a55e9cd3-19f6-4a2c-b4dc-1ce6850c5012 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.147624] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84fd0fdd-73f7-473d-a4b1-a4171099305f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.154544] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7634ea8-e5ab-462b-bede-1af4b9e417f7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.167430] env[67015]: DEBUG nova.compute.provider_tree [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1397.176713] env[67015]: DEBUG nova.scheduler.client.report [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 
1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1397.190506] env[67015]: DEBUG oslo_concurrency.lockutils [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.313s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.191029] env[67015]: DEBUG nova.compute.manager [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1397.223039] env[67015]: DEBUG nova.compute.utils [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1397.223743] env[67015]: DEBUG nova.compute.manager [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1397.223915] env[67015]: DEBUG nova.network.neutron [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1397.233144] env[67015]: DEBUG nova.compute.manager [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1397.298504] env[67015]: DEBUG nova.compute.manager [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Start spawning the instance on the hypervisor. 
{{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1397.323912] env[67015]: DEBUG nova.virt.hardware [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1397.324220] env[67015]: DEBUG nova.virt.hardware [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1397.324401] env[67015]: DEBUG nova.virt.hardware [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1397.324611] env[67015]: DEBUG nova.virt.hardware [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1397.324767] env[67015]: DEBUG nova.virt.hardware [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1397.324915] env[67015]: DEBUG nova.virt.hardware [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1397.325175] env[67015]: DEBUG nova.virt.hardware [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1397.325341] env[67015]: DEBUG nova.virt.hardware [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1397.325510] env[67015]: DEBUG nova.virt.hardware [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 
tempest-ServersTestJSON-767920299-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1397.325674] env[67015]: DEBUG nova.virt.hardware [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1397.325849] env[67015]: DEBUG nova.virt.hardware [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1397.326775] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41799cae-1c58-49ea-a3e5-f8990781522d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.335065] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6b4bdca-5255-4aeb-876e-2125d5774db0 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.517126] env[67015]: DEBUG nova.policy [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c296928b2fac410abe4cf22099518f74', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '65b8edd6f91443aa8051a18bbd34cc29', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 1398.006864] env[67015]: DEBUG nova.network.neutron [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Successfully created port: c446b58f-cafd-452b-bdf4-2da311d2502b {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1398.715134] env[67015]: DEBUG nova.compute.manager [req-1b2b5ab1-5bfb-481b-b383-f6e7160c2858 req-b17d540e-8855-42da-8e13-4934b00f80e9 service nova] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Received event network-vif-plugged-c446b58f-cafd-452b-bdf4-2da311d2502b {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1398.715393] env[67015]: DEBUG oslo_concurrency.lockutils [req-1b2b5ab1-5bfb-481b-b383-f6e7160c2858 req-b17d540e-8855-42da-8e13-4934b00f80e9 service nova] Acquiring lock "6d104e2f-9924-4094-823d-a78c21acfc7b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.715559] env[67015]: DEBUG oslo_concurrency.lockutils [req-1b2b5ab1-5bfb-481b-b383-f6e7160c2858 req-b17d540e-8855-42da-8e13-4934b00f80e9 service nova] Lock "6d104e2f-9924-4094-823d-a78c21acfc7b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 
0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.715727] env[67015]: DEBUG oslo_concurrency.lockutils [req-1b2b5ab1-5bfb-481b-b383-f6e7160c2858 req-b17d540e-8855-42da-8e13-4934b00f80e9 service nova] Lock "6d104e2f-9924-4094-823d-a78c21acfc7b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1398.715893] env[67015]: DEBUG nova.compute.manager [req-1b2b5ab1-5bfb-481b-b383-f6e7160c2858 req-b17d540e-8855-42da-8e13-4934b00f80e9 service nova] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] No waiting events found dispatching network-vif-plugged-c446b58f-cafd-452b-bdf4-2da311d2502b {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1398.719064] env[67015]: WARNING nova.compute.manager [req-1b2b5ab1-5bfb-481b-b383-f6e7160c2858 req-b17d540e-8855-42da-8e13-4934b00f80e9 service nova] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Received unexpected event network-vif-plugged-c446b58f-cafd-452b-bdf4-2da311d2502b for instance with vm_state building and task_state spawning. [ 1398.793093] env[67015]: DEBUG nova.network.neutron [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Successfully updated port: c446b58f-cafd-452b-bdf4-2da311d2502b {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1398.806995] env[67015]: DEBUG oslo_concurrency.lockutils [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "refresh_cache-6d104e2f-9924-4094-823d-a78c21acfc7b" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1398.807355] env[67015]: DEBUG oslo_concurrency.lockutils [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquired lock "refresh_cache-6d104e2f-9924-4094-823d-a78c21acfc7b" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1398.807355] env[67015]: DEBUG nova.network.neutron [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1398.882404] env[67015]: DEBUG nova.network.neutron [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Instance cache missing network info. 
{{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1399.108507] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5a393502-cce0-4b47-9ae3-47c52dc9f44f tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "6d104e2f-9924-4094-823d-a78c21acfc7b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1399.136834] env[67015]: DEBUG nova.network.neutron [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Updating instance_info_cache with network_info: [{"id": "c446b58f-cafd-452b-bdf4-2da311d2502b", "address": "fa:16:3e:06:c2:ea", "network": {"id": "9688d542-250f-4036-8015-7464f44aa4d6", "bridge": "br-int", "label": "tempest-ServersTestJSON-439582917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65b8edd6f91443aa8051a18bbd34cc29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc446b58f-ca", "ovs_interfaceid": "c446b58f-cafd-452b-bdf4-2da311d2502b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1399.148861] env[67015]: DEBUG oslo_concurrency.lockutils [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Releasing lock "refresh_cache-6d104e2f-9924-4094-823d-a78c21acfc7b" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1399.148861] env[67015]: DEBUG nova.compute.manager [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Instance network_info: |[{"id": "c446b58f-cafd-452b-bdf4-2da311d2502b", "address": "fa:16:3e:06:c2:ea", "network": {"id": "9688d542-250f-4036-8015-7464f44aa4d6", "bridge": "br-int", "label": "tempest-ServersTestJSON-439582917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65b8edd6f91443aa8051a18bbd34cc29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": 
"nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc446b58f-ca", "ovs_interfaceid": "c446b58f-cafd-452b-bdf4-2da311d2502b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1399.149221] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:c2:ea', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb0e556a-0f69-4a5c-af62-ffc46edb8e63', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c446b58f-cafd-452b-bdf4-2da311d2502b', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1399.157482] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Creating folder: Project (65b8edd6f91443aa8051a18bbd34cc29). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1399.157956] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c4bcbeb-5a95-42d7-862c-49dbe3d48657 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.167798] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Created folder: Project (65b8edd6f91443aa8051a18bbd34cc29) in parent group-v623108. [ 1399.168008] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Creating folder: Instances. Parent ref: group-v623194. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1399.168245] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6f8771cc-2c5d-4a23-a3f5-3104a74193b8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.177364] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Created folder: Instances in parent group-v623194. [ 1399.177589] env[67015]: DEBUG oslo.service.loopingcall [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1399.177766] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1399.177948] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb1056c4-9f71-4ee2-ad1a-a30910766711 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.195857] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1399.195857] env[67015]: value = "task-3114508" [ 1399.195857] env[67015]: _type = "Task" [ 1399.195857] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.203086] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114508, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.705498] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114508, 'name': CreateVM_Task, 'duration_secs': 0.28563} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.705685] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1399.706412] env[67015]: DEBUG oslo_concurrency.lockutils [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1399.706598] env[67015]: DEBUG oslo_concurrency.lockutils [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.706928] env[67015]: DEBUG oslo_concurrency.lockutils [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1399.707196] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87cca77c-d261-44f5-8101-309ce6853c80 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.711459] env[67015]: DEBUG oslo_vmware.api [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Waiting for the task: (returnval){ [ 1399.711459] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52cda84c-3aff-71e5-41dd-b368f4eb0781" [ 1399.711459] env[67015]: _type = "Task" [ 1399.711459] env[67015]: } to complete. 
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.719191] env[67015]: DEBUG oslo_vmware.api [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52cda84c-3aff-71e5-41dd-b368f4eb0781, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.221613] env[67015]: DEBUG oslo_concurrency.lockutils [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1400.221886] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1400.222115] env[67015]: DEBUG oslo_concurrency.lockutils [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1400.744372] env[67015]: DEBUG nova.compute.manager [req-257a244b-d51d-407d-a0dc-4cc202bc4936 req-7d71fe8c-67e8-4878-860a-fd9b78625d97 service nova] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Received event network-changed-c446b58f-cafd-452b-bdf4-2da311d2502b {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1400.744606] env[67015]: DEBUG nova.compute.manager [req-257a244b-d51d-407d-a0dc-4cc202bc4936 req-7d71fe8c-67e8-4878-860a-fd9b78625d97 service nova] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Refreshing instance network info cache due to event network-changed-c446b58f-cafd-452b-bdf4-2da311d2502b. 
{{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1400.744760] env[67015]: DEBUG oslo_concurrency.lockutils [req-257a244b-d51d-407d-a0dc-4cc202bc4936 req-7d71fe8c-67e8-4878-860a-fd9b78625d97 service nova] Acquiring lock "refresh_cache-6d104e2f-9924-4094-823d-a78c21acfc7b" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1400.744892] env[67015]: DEBUG oslo_concurrency.lockutils [req-257a244b-d51d-407d-a0dc-4cc202bc4936 req-7d71fe8c-67e8-4878-860a-fd9b78625d97 service nova] Acquired lock "refresh_cache-6d104e2f-9924-4094-823d-a78c21acfc7b" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1400.745069] env[67015]: DEBUG nova.network.neutron [req-257a244b-d51d-407d-a0dc-4cc202bc4936 req-7d71fe8c-67e8-4878-860a-fd9b78625d97 service nova] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Refreshing network info cache for port c446b58f-cafd-452b-bdf4-2da311d2502b {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1401.300794] env[67015]: DEBUG nova.network.neutron [req-257a244b-d51d-407d-a0dc-4cc202bc4936 req-7d71fe8c-67e8-4878-860a-fd9b78625d97 service nova] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Updated VIF entry in instance network info cache for port c446b58f-cafd-452b-bdf4-2da311d2502b. {{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1401.301245] env[67015]: DEBUG nova.network.neutron [req-257a244b-d51d-407d-a0dc-4cc202bc4936 req-7d71fe8c-67e8-4878-860a-fd9b78625d97 service nova] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Updating instance_info_cache with network_info: [{"id": "c446b58f-cafd-452b-bdf4-2da311d2502b", "address": "fa:16:3e:06:c2:ea", "network": {"id": "9688d542-250f-4036-8015-7464f44aa4d6", "bridge": "br-int", "label": "tempest-ServersTestJSON-439582917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65b8edd6f91443aa8051a18bbd34cc29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc446b58f-ca", "ovs_interfaceid": "c446b58f-cafd-452b-bdf4-2da311d2502b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1401.310412] env[67015]: DEBUG oslo_concurrency.lockutils [req-257a244b-d51d-407d-a0dc-4cc202bc4936 req-7d71fe8c-67e8-4878-860a-fd9b78625d97 service nova] Releasing lock "refresh_cache-6d104e2f-9924-4094-823d-a78c21acfc7b" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1410.759585] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d5f58724-5684-46f0-9573-5ec08310db46 tempest-ServersListShow296Test-2116193991 tempest-ServersListShow296Test-2116193991-project-member] Acquiring 
lock "03d60eff-5f68-4c04-83e4-926c0ac3103a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.759934] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d5f58724-5684-46f0-9573-5ec08310db46 tempest-ServersListShow296Test-2116193991 tempest-ServersListShow296Test-2116193991-project-member] Lock "03d60eff-5f68-4c04-83e4-926c0ac3103a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1422.315049] env[67015]: DEBUG oslo_concurrency.lockutils [None req-78af0ae1-30e7-42a2-a8a8-0691615ba325 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Acquiring lock "210c7815-0a29-47a6-a4c0-5e7f9ca5abf2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1422.315317] env[67015]: DEBUG oslo_concurrency.lockutils [None req-78af0ae1-30e7-42a2-a8a8-0691615ba325 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Lock "210c7815-0a29-47a6-a4c0-5e7f9ca5abf2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.514685] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1428.515154] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1428.515573] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1428.515573] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Cleaning up deleted instances {{(pid=67015) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1428.526280] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] There are 0 instances to clean {{(pid=67015) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 1429.520661] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1430.514601] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1431.513896] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1432.514548] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1432.526928] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1432.527134] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1432.527308] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1432.527465] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1432.528634] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-548cfcd7-e860-44d4-82b8-b635205f1ee6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.537096] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cfae5fe-43a1-43ae-8f98-86fff4ff7c78 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.550468] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a8b76af-943d-4dc5-8ef4-89ece42a7133 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.556469] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d16b864-33ef-409c-9882-7bd9645daa0f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.584694] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181061MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1432.584843] env[67015]: DEBUG 
oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1432.585090] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1432.732227] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance db3de804-63b7-4887-b752-282e70e0f20e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1432.732399] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8c919afe-37b6-47f0-b939-d9df5800d7ee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1432.732528] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 437b21d5-932d-4216-b7f7-17c6eab2665f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1432.732651] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1432.732771] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 7b744243-c7e5-4253-9273-9d7f84772d96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1432.732885] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 98f18180-bd1c-492d-9fbe-4bf306aca4b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1432.733011] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 7a5c2a57-b28d-45e0-ab7b-5a649758b69b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1432.733138] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance c7d8b42d-6455-4489-9f62-8ab9f85e7f76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1432.733254] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1432.733368] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6d104e2f-9924-4094-823d-a78c21acfc7b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1432.750230] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 944728f0-7db6-4cca-a51c-7acb5998cb12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1432.760975] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance b7b12357-09c4-402f-bf1c-f8872d86d17b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1432.771159] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 41f47735-f679-4b30-8e30-f917dcf4db42 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1432.780523] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1432.789729] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 199b0508-5b88-41b4-ae08-dcdabb656686 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1432.802960] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance f9cb0b88-053a-4a6d-be59-5aa202b6a4f7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1432.813273] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 03d60eff-5f68-4c04-83e4-926c0ac3103a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1432.824310] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 210c7815-0a29-47a6-a4c0-5e7f9ca5abf2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1432.824540] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1432.824773] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1432.840418] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Refreshing inventories for resource provider 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1432.856614] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Updating ProviderTree inventory for provider 82311841-8ff3-4f49-9053-67c5a45ef771 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1432.856861] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Updating inventory in ProviderTree for provider 82311841-8ff3-4f49-9053-67c5a45ef771 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1432.868062] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Refreshing aggregate associations for resource provider 82311841-8ff3-4f49-9053-67c5a45ef771, aggregates: None {{(pid=67015) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1432.886756] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Refreshing trait associations for resource provider 82311841-8ff3-4f49-9053-67c5a45ef771, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=67015) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1433.088305] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-828ea665-46e7-4e3d-9dcb-042a62947067 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.095545] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b670f892-7905-4ccc-96d3-d91797e8f6eb {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.124501] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d280ae8-e26e-4744-b091-747d677c587a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.131063] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5dcf6b2-591b-4e95-b405-a2ab16c59887 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.143503] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1433.152050] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1433.166760] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1433.166936] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.582s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1434.167331] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1434.513611] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1434.513793] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1438.514955] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1438.515422] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1438.515422] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1438.540350] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1438.540842] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1438.540842] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1438.540959] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1438.541131] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1438.541315] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1438.541493] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1438.541669] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Skipping network cache update for instance because it is Building. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1438.541841] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1438.542076] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1438.542193] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1438.542898] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1438.543101] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Cleaning up deleted instances with incomplete migration {{(pid=67015) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 1438.553921] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1440.927448] env[67015]: WARNING oslo_vmware.rw_handles [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1440.927448] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1440.927448] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1440.927448] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1440.927448] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1440.927448] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 1440.927448] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1440.927448] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1440.927448] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1440.927448] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1440.927448] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1440.927448] env[67015]: ERROR oslo_vmware.rw_handles [ 1440.928077] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 
tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/7619dd1d-a5a9-49e9-b5c8-8e7617ef571d/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1440.929994] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1440.930262] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Copying Virtual Disk [datastore2] vmware_temp/7619dd1d-a5a9-49e9-b5c8-8e7617ef571d/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/7619dd1d-a5a9-49e9-b5c8-8e7617ef571d/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1440.930575] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bb3afc6d-56d9-4198-afbc-c0dc45a256d7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.939339] env[67015]: DEBUG oslo_vmware.api [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Waiting for the task: (returnval){ [ 1440.939339] env[67015]: value = "task-3114509" [ 1440.939339] env[67015]: _type = "Task" [ 1440.939339] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.947015] env[67015]: DEBUG oslo_vmware.api [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Task: {'id': task-3114509, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.448961] env[67015]: DEBUG oslo_vmware.exceptions [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Fault InvalidArgument not matched. 
{{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1441.449298] env[67015]: DEBUG oslo_concurrency.lockutils [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1441.449886] env[67015]: ERROR nova.compute.manager [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1441.449886] env[67015]: Faults: ['InvalidArgument'] [ 1441.449886] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] Traceback (most recent call last): [ 1441.449886] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1441.449886] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] yield resources [ 1441.449886] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1441.449886] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] self.driver.spawn(context, instance, image_meta, [ 1441.449886] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1441.449886] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1441.449886] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1441.449886] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] self._fetch_image_if_missing(context, vi) [ 1441.449886] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1441.450248] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] image_cache(vi, tmp_image_ds_loc) [ 1441.450248] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1441.450248] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] vm_util.copy_virtual_disk( [ 1441.450248] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1441.450248] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] session._wait_for_task(vmdk_copy_task) [ 1441.450248] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1441.450248] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] return self.wait_for_task(task_ref) [ 1441.450248] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1441.450248] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] return evt.wait() [ 1441.450248] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1441.450248] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] result = hub.switch() [ 1441.450248] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1441.450248] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] return self.greenlet.switch() [ 1441.450657] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1441.450657] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] self.f(*self.args, **self.kw) [ 1441.450657] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1441.450657] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] raise exceptions.translate_fault(task_info.error) [ 1441.450657] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1441.450657] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] Faults: ['InvalidArgument'] [ 1441.450657] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] [ 1441.450657] env[67015]: INFO nova.compute.manager [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Terminating instance [ 1441.452246] env[67015]: DEBUG oslo_concurrency.lockutils [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1441.452246] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1441.452526] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-baae93e3-0da3-4ab1-aebb-815fc4901a62 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.454630] env[67015]: DEBUG nova.compute.manager [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1441.454867] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1441.455603] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a81c902-c271-4029-b34d-1026546b976f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.462580] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1441.462822] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fe5fd9c2-1da1-4b12-8695-9a0e85e4ef28 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.464856] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1441.465087] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1441.466122] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9f34752-03b6-4a86-8cbd-0e45cfe35224 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.471333] env[67015]: DEBUG oslo_vmware.api [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Waiting for the task: (returnval){ [ 1441.471333] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]521e6111-c81c-ba12-167a-be60f0a2e921" [ 1441.471333] env[67015]: _type = "Task" [ 1441.471333] env[67015]: } to complete. 
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.478538] env[67015]: DEBUG oslo_vmware.api [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]521e6111-c81c-ba12-167a-be60f0a2e921, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.525701] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1441.526050] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1441.526320] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Deleting the datastore file [datastore2] db3de804-63b7-4887-b752-282e70e0f20e {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1441.526650] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-72ef5c8f-8ee8-46ce-8e32-4a743bbc38c6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.532771] env[67015]: DEBUG oslo_vmware.api [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Waiting for the task: (returnval){ [ 1441.532771] env[67015]: value = "task-3114511" [ 1441.532771] env[67015]: _type = "Task" [ 1441.532771] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.540195] env[67015]: DEBUG oslo_vmware.api [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Task: {'id': task-3114511, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.982851] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1441.983122] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Creating directory with path [datastore2] vmware_temp/f61b0d39-a836-4093-9b24-074f805bde49/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1441.983415] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd926161-59a5-4c9c-a32f-6161d7e90446 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.995464] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Created directory with path [datastore2] vmware_temp/f61b0d39-a836-4093-9b24-074f805bde49/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1441.995681] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Fetch image to [datastore2] vmware_temp/f61b0d39-a836-4093-9b24-074f805bde49/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1441.995888] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/f61b0d39-a836-4093-9b24-074f805bde49/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1441.996625] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56109c57-811e-4c36-8aff-f3c583c5d313 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.003055] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e51ae98-5e12-40cd-88c9-63d231fc2dcc {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.012086] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be386b14-d44a-4e3a-aaa4-2f4cb81ad54f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.046114] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-083ceb5c-3cb0-460d-876f-47942f7097f5 
{{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.055111] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-845d40b5-8782-48dd-bfca-c2fbf533cc54 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.056858] env[67015]: DEBUG oslo_vmware.api [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Task: {'id': task-3114511, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068888} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.057447] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1442.057447] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1442.057447] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1442.057640] env[67015]: INFO nova.compute.manager [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1442.059834] env[67015]: DEBUG nova.compute.claims [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1442.060011] env[67015]: DEBUG oslo_concurrency.lockutils [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1442.060236] env[67015]: DEBUG oslo_concurrency.lockutils [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1442.079492] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1442.141019] env[67015]: DEBUG oslo_vmware.rw_handles [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f61b0d39-a836-4093-9b24-074f805bde49/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1442.200595] env[67015]: DEBUG oslo_vmware.rw_handles [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1442.200799] env[67015]: DEBUG oslo_vmware.rw_handles [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f61b0d39-a836-4093-9b24-074f805bde49/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1442.357519] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0283476-204a-4cd1-bb9e-dd6be1991bcd {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1442.365164] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-401c5d8a-6198-46ec-a211-a1849f17b2e5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1442.394178] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee7957b2-70dc-4620-adf3-4ef7b962c2dc {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1442.400840] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c064f8c-e997-4b13-a6a6-5c5b1f610e32 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1442.413289] env[67015]: DEBUG nova.compute.provider_tree [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1442.421544] env[67015]: DEBUG nova.scheduler.client.report [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1442.435166] env[67015]: DEBUG oslo_concurrency.lockutils [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.375s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1442.436086] env[67015]: ERROR nova.compute.manager [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1442.436086] env[67015]: Faults: ['InvalidArgument']
[ 1442.436086] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] Traceback (most recent call last):
[ 1442.436086] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1442.436086] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] self.driver.spawn(context, instance, image_meta,
[ 1442.436086] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1442.436086] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1442.436086] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1442.436086] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] self._fetch_image_if_missing(context, vi)
[ 1442.436086] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1442.436086] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] image_cache(vi, tmp_image_ds_loc)
[ 1442.436086] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1442.436399] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] vm_util.copy_virtual_disk(
[ 1442.436399] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1442.436399] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] session._wait_for_task(vmdk_copy_task)
[ 1442.436399] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1442.436399] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] return self.wait_for_task(task_ref)
[ 1442.436399] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1442.436399] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] return evt.wait()
[ 1442.436399] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1442.436399] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] result = hub.switch()
[ 1442.436399] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1442.436399] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] return self.greenlet.switch()
[ 1442.436399] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1442.436399] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] self.f(*self.args, **self.kw)
[ 1442.436731] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1442.436731] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] raise exceptions.translate_fault(task_info.error)
[ 1442.436731] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1442.436731] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e] Faults: ['InvalidArgument']
[ 1442.436731] env[67015]: ERROR nova.compute.manager [instance: db3de804-63b7-4887-b752-282e70e0f20e]
[ 1442.436731] env[67015]: DEBUG nova.compute.utils [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1442.437912] env[67015]: DEBUG nova.compute.manager [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Build of instance db3de804-63b7-4887-b752-282e70e0f20e was re-scheduled: A specified parameter was not correct: fileType
[ 1442.437912] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1442.438311] env[67015]: DEBUG nova.compute.manager [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1442.438486] env[67015]: DEBUG nova.compute.manager [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 1442.438724] env[67015]: DEBUG nova.compute.manager [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1442.438904] env[67015]: DEBUG nova.network.neutron [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1442.872122] env[67015]: DEBUG nova.network.neutron [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1442.883210] env[67015]: INFO nova.compute.manager [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Took 0.44 seconds to deallocate network for instance.
[ 1442.985907] env[67015]: INFO nova.scheduler.client.report [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Deleted allocations for instance db3de804-63b7-4887-b752-282e70e0f20e
[ 1443.008527] env[67015]: DEBUG oslo_concurrency.lockutils [None req-daa1ae94-dfc1-477c-be5c-bee4acb65c71 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Lock "db3de804-63b7-4887-b752-282e70e0f20e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 635.506s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1443.009781] env[67015]: DEBUG oslo_concurrency.lockutils [None req-943d3af3-3135-4a6a-b373-df400fd103f8 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Lock "db3de804-63b7-4887-b752-282e70e0f20e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 436.163s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1443.010031] env[67015]: DEBUG oslo_concurrency.lockutils [None req-943d3af3-3135-4a6a-b373-df400fd103f8 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Acquiring lock "db3de804-63b7-4887-b752-282e70e0f20e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1443.010271] env[67015]: DEBUG oslo_concurrency.lockutils [None req-943d3af3-3135-4a6a-b373-df400fd103f8 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Lock "db3de804-63b7-4887-b752-282e70e0f20e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1443.010445] env[67015]: DEBUG oslo_concurrency.lockutils [None req-943d3af3-3135-4a6a-b373-df400fd103f8 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Lock "db3de804-63b7-4887-b752-282e70e0f20e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1443.013157] env[67015]: INFO nova.compute.manager [None req-943d3af3-3135-4a6a-b373-df400fd103f8 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Terminating instance
[ 1443.015948] env[67015]: DEBUG oslo_concurrency.lockutils [None req-943d3af3-3135-4a6a-b373-df400fd103f8 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Acquiring lock "refresh_cache-db3de804-63b7-4887-b752-282e70e0f20e" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1443.016154] env[67015]: DEBUG oslo_concurrency.lockutils [None req-943d3af3-3135-4a6a-b373-df400fd103f8 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Acquired lock "refresh_cache-db3de804-63b7-4887-b752-282e70e0f20e" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1443.016803] env[67015]: DEBUG nova.network.neutron [None req-943d3af3-3135-4a6a-b373-df400fd103f8 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1443.021932] env[67015]: DEBUG nova.compute.manager [None req-cfbb2e8d-aa60-44b6-b153-a8a1a642f04b tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 55346ba0-b93e-489f-8b89-640b7e33e384] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1443.043576] env[67015]: DEBUG nova.network.neutron [None req-943d3af3-3135-4a6a-b373-df400fd103f8 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1443.046261] env[67015]: DEBUG nova.compute.manager [None req-cfbb2e8d-aa60-44b6-b153-a8a1a642f04b tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 55346ba0-b93e-489f-8b89-640b7e33e384] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 1443.070221] env[67015]: DEBUG oslo_concurrency.lockutils [None req-cfbb2e8d-aa60-44b6-b153-a8a1a642f04b tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Lock "55346ba0-b93e-489f-8b89-640b7e33e384" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 217.243s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1443.084230] env[67015]: DEBUG nova.compute.manager [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1443.133427] env[67015]: DEBUG oslo_concurrency.lockutils [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1443.133684] env[67015]: DEBUG oslo_concurrency.lockutils [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1443.135196] env[67015]: INFO nova.compute.claims [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1443.376800] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2437fa1d-c9fa-493f-adc1-65a421426f26 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1443.384268] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1293eea6-d514-40d2-b877-ee5cb6b96a6e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1443.413939] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5acb283d-e938-4d55-822c-23a9a51b8f12 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1443.420694] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83414315-c51b-485f-85f3-6ae90b324772 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1443.434280] env[67015]: DEBUG nova.compute.provider_tree [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1443.439019] env[67015]: DEBUG nova.network.neutron [None req-943d3af3-3135-4a6a-b373-df400fd103f8 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1443.446922] env[67015]: DEBUG nova.scheduler.client.report [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1443.450287] env[67015]: DEBUG oslo_concurrency.lockutils [None req-943d3af3-3135-4a6a-b373-df400fd103f8 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Releasing lock "refresh_cache-db3de804-63b7-4887-b752-282e70e0f20e" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1443.450752] env[67015]: DEBUG nova.compute.manager [None req-943d3af3-3135-4a6a-b373-df400fd103f8 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Start destroying the instance on the hypervisor. 
{{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1443.450852] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-943d3af3-3135-4a6a-b373-df400fd103f8 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1443.451431] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-babb1b71-f4a1-47df-b0b7-de83ae0f3612 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.459726] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcaa47c5-0295-43ba-878c-8bdb21d045ba {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.472891] env[67015]: DEBUG oslo_concurrency.lockutils [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.339s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.473377] env[67015]: DEBUG nova.compute.manager [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1443.495021] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-943d3af3-3135-4a6a-b373-df400fd103f8 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance db3de804-63b7-4887-b752-282e70e0f20e could not be found. [ 1443.495021] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-943d3af3-3135-4a6a-b373-df400fd103f8 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1443.495021] env[67015]: INFO nova.compute.manager [None req-943d3af3-3135-4a6a-b373-df400fd103f8 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1443.495021] env[67015]: DEBUG oslo.service.loopingcall [None req-943d3af3-3135-4a6a-b373-df400fd103f8 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1443.495021] env[67015]: DEBUG nova.compute.manager [-] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1443.495398] env[67015]: DEBUG nova.network.neutron [-] [instance: db3de804-63b7-4887-b752-282e70e0f20e] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1443.509058] env[67015]: DEBUG nova.compute.utils [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1443.510484] env[67015]: DEBUG nova.compute.manager [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1443.510484] env[67015]: DEBUG nova.network.neutron [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1443.519044] env[67015]: DEBUG nova.network.neutron [-] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1443.526801] env[67015]: DEBUG nova.compute.manager [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1443.529857] env[67015]: DEBUG nova.network.neutron [-] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1443.538095] env[67015]: INFO nova.compute.manager [-] [instance: db3de804-63b7-4887-b752-282e70e0f20e] Took 0.04 seconds to deallocate network for instance. 
[ 1443.583638] env[67015]: DEBUG nova.policy [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee8a56b7100b45be9cc7d3c97341051f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '105905a257424bb5adffc9b70943494d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1443.618806] env[67015]: DEBUG nova.compute.manager [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Start spawning the instance on the hypervisor. {{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
[ 1443.646651] env[67015]: DEBUG nova.virt.hardware [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1443.646846] env[67015]: DEBUG nova.virt.hardware [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1443.647131] env[67015]: DEBUG nova.virt.hardware [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1443.647207] env[67015]: DEBUG nova.virt.hardware [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1443.647350] env[67015]: DEBUG nova.virt.hardware [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1443.647527] env[67015]: DEBUG nova.virt.hardware [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1443.647832] env[67015]: DEBUG nova.virt.hardware [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1443.647997] env[67015]: DEBUG nova.virt.hardware [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1443.648179] env[67015]: DEBUG nova.virt.hardware [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1443.648344] env[67015]: DEBUG nova.virt.hardware [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1443.648524] env[67015]: DEBUG nova.virt.hardware [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1443.649400] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7451e27d-f89e-4c24-a0d6-ef9aa51ccfa8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1443.657791] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78613c49-4498-492b-b07c-006a0eaa5aac {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1443.664949] env[67015]: DEBUG oslo_concurrency.lockutils [None req-943d3af3-3135-4a6a-b373-df400fd103f8 tempest-ServerRescueNegativeTestJSON-1996315238 tempest-ServerRescueNegativeTestJSON-1996315238-project-member] Lock "db3de804-63b7-4887-b752-282e70e0f20e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.655s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1443.665800] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "db3de804-63b7-4887-b752-282e70e0f20e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 272.851s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1443.666340] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: db3de804-63b7-4887-b752-282e70e0f20e] During sync_power_state the instance has a pending task (deleting). Skip.
[ 1443.666340] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "db3de804-63b7-4887-b752-282e70e0f20e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1443.926821] env[67015]: DEBUG nova.network.neutron [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Successfully created port: bccf35de-ed48-4dc0-b4d8-aac08d663912 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1444.409295] env[67015]: DEBUG nova.compute.manager [req-ea140fca-10b7-43dc-a518-bb54f5cec407 req-fdeb6453-b6aa-4e52-9bf5-25a0a476e5d1 service nova] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Received event network-vif-plugged-bccf35de-ed48-4dc0-b4d8-aac08d663912 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1444.409930] env[67015]: DEBUG oslo_concurrency.lockutils [req-ea140fca-10b7-43dc-a518-bb54f5cec407 req-fdeb6453-b6aa-4e52-9bf5-25a0a476e5d1 service nova] Acquiring lock "944728f0-7db6-4cca-a51c-7acb5998cb12-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1444.410320] env[67015]: DEBUG oslo_concurrency.lockutils [req-ea140fca-10b7-43dc-a518-bb54f5cec407 req-fdeb6453-b6aa-4e52-9bf5-25a0a476e5d1 service nova] Lock "944728f0-7db6-4cca-a51c-7acb5998cb12-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1444.410694] env[67015]: DEBUG oslo_concurrency.lockutils [req-ea140fca-10b7-43dc-a518-bb54f5cec407 req-fdeb6453-b6aa-4e52-9bf5-25a0a476e5d1 service nova] Lock "944728f0-7db6-4cca-a51c-7acb5998cb12-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1444.411041] env[67015]: DEBUG nova.compute.manager [req-ea140fca-10b7-43dc-a518-bb54f5cec407 req-fdeb6453-b6aa-4e52-9bf5-25a0a476e5d1 service nova] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] No waiting events found dispatching network-vif-plugged-bccf35de-ed48-4dc0-b4d8-aac08d663912 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1444.411377] env[67015]: WARNING nova.compute.manager [req-ea140fca-10b7-43dc-a518-bb54f5cec407 req-fdeb6453-b6aa-4e52-9bf5-25a0a476e5d1 service nova] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Received unexpected event network-vif-plugged-bccf35de-ed48-4dc0-b4d8-aac08d663912 for instance with vm_state building and task_state spawning.
[ 1444.478215] env[67015]: DEBUG nova.network.neutron [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Successfully updated port: bccf35de-ed48-4dc0-b4d8-aac08d663912 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1444.493694] env[67015]: DEBUG oslo_concurrency.lockutils [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquiring lock "refresh_cache-944728f0-7db6-4cca-a51c-7acb5998cb12" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1444.493694] env[67015]: DEBUG oslo_concurrency.lockutils [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquired lock "refresh_cache-944728f0-7db6-4cca-a51c-7acb5998cb12" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1444.493694] env[67015]: DEBUG nova.network.neutron [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1444.535541] env[67015]: DEBUG nova.network.neutron [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1444.703542] env[67015]: DEBUG nova.network.neutron [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Updating instance_info_cache with network_info: [{"id": "bccf35de-ed48-4dc0-b4d8-aac08d663912", "address": "fa:16:3e:f3:81:c2", "network": {"id": "7dde7810-c31d-4e36-9427-9a4405525e82", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1963602411-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "105905a257424bb5adffc9b70943494d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbccf35de-ed", "ovs_interfaceid": "bccf35de-ed48-4dc0-b4d8-aac08d663912", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1444.719048] env[67015]: DEBUG oslo_concurrency.lockutils [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Releasing lock "refresh_cache-944728f0-7db6-4cca-a51c-7acb5998cb12" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1444.720030] env[67015]: DEBUG nova.compute.manager [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Instance network_info: |[{"id": "bccf35de-ed48-4dc0-b4d8-aac08d663912", "address": "fa:16:3e:f3:81:c2", "network": {"id": "7dde7810-c31d-4e36-9427-9a4405525e82", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1963602411-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "105905a257424bb5adffc9b70943494d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbccf35de-ed", "ovs_interfaceid": "bccf35de-ed48-4dc0-b4d8-aac08d663912", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 1444.720432] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:81:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bccf35de-ed48-4dc0-b4d8-aac08d663912', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1444.729555] env[67015]: DEBUG oslo.service.loopingcall [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1444.730516] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1444.730764] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e00ec2fb-bcee-434c-8d17-3bd1334ae32a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1444.753431] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1444.753431] env[67015]: value = "task-3114512"
[ 1444.753431] env[67015]: _type = "Task"
[ 1444.753431] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1444.761671] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114512, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1445.263889] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114512, 'name': CreateVM_Task, 'duration_secs': 0.285786} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1445.264109] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1445.264716] env[67015]: DEBUG oslo_concurrency.lockutils [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1445.264884] env[67015]: DEBUG oslo_concurrency.lockutils [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1445.265229] env[67015]: DEBUG oslo_concurrency.lockutils [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1445.265473] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f207e0b1-7d41-4391-adea-2c66d119cde6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1445.269940] env[67015]: DEBUG oslo_vmware.api [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Waiting for the task: (returnval){
[ 1445.269940] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52c7ea41-570c-0bed-4138-b88cfbbd6887"
[ 1445.269940] env[67015]: _type = "Task"
[ 1445.269940] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1445.277310] env[67015]: DEBUG oslo_vmware.api [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52c7ea41-570c-0bed-4138-b88cfbbd6887, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1445.782046] env[67015]: DEBUG oslo_concurrency.lockutils [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1445.782330] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1445.782330] env[67015]: DEBUG oslo_concurrency.lockutils [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1446.431545] env[67015]: DEBUG nova.compute.manager [req-c91dd21c-7d18-4e2c-99ea-6fca9a6175df req-6f0a55a5-db56-4e89-8c9e-7a7d573c33d6 service nova] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Received event network-changed-bccf35de-ed48-4dc0-b4d8-aac08d663912 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1446.431792] env[67015]: DEBUG nova.compute.manager [req-c91dd21c-7d18-4e2c-99ea-6fca9a6175df req-6f0a55a5-db56-4e89-8c9e-7a7d573c33d6 service nova] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Refreshing instance network info cache due to event network-changed-bccf35de-ed48-4dc0-b4d8-aac08d663912. {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 1446.432023] env[67015]: DEBUG oslo_concurrency.lockutils [req-c91dd21c-7d18-4e2c-99ea-6fca9a6175df req-6f0a55a5-db56-4e89-8c9e-7a7d573c33d6 service nova] Acquiring lock "refresh_cache-944728f0-7db6-4cca-a51c-7acb5998cb12" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1446.432170] env[67015]: DEBUG oslo_concurrency.lockutils [req-c91dd21c-7d18-4e2c-99ea-6fca9a6175df req-6f0a55a5-db56-4e89-8c9e-7a7d573c33d6 service nova] Acquired lock "refresh_cache-944728f0-7db6-4cca-a51c-7acb5998cb12" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1446.432333] env[67015]: DEBUG nova.network.neutron [req-c91dd21c-7d18-4e2c-99ea-6fca9a6175df req-6f0a55a5-db56-4e89-8c9e-7a7d573c33d6 service nova] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Refreshing network info cache for port bccf35de-ed48-4dc0-b4d8-aac08d663912 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1446.677210] env[67015]: DEBUG nova.network.neutron [req-c91dd21c-7d18-4e2c-99ea-6fca9a6175df req-6f0a55a5-db56-4e89-8c9e-7a7d573c33d6 service nova] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Updated VIF entry in instance network info cache for port bccf35de-ed48-4dc0-b4d8-aac08d663912. {{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1446.677515] env[67015]: DEBUG nova.network.neutron [req-c91dd21c-7d18-4e2c-99ea-6fca9a6175df req-6f0a55a5-db56-4e89-8c9e-7a7d573c33d6 service nova] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Updating instance_info_cache with network_info: [{"id": "bccf35de-ed48-4dc0-b4d8-aac08d663912", "address": "fa:16:3e:f3:81:c2", "network": {"id": "7dde7810-c31d-4e36-9427-9a4405525e82", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1963602411-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "105905a257424bb5adffc9b70943494d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbccf35de-ed", "ovs_interfaceid": "bccf35de-ed48-4dc0-b4d8-aac08d663912", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1446.686766] env[67015]: DEBUG oslo_concurrency.lockutils [req-c91dd21c-7d18-4e2c-99ea-6fca9a6175df req-6f0a55a5-db56-4e89-8c9e-7a7d573c33d6 service nova] Releasing lock "refresh_cache-944728f0-7db6-4cca-a51c-7acb5998cb12" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1471.736228] env[67015]: DEBUG oslo_concurrency.lockutils [None req-cf4d5891-6888-4ae6-bbdc-afec29923919 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquiring lock "944728f0-7db6-4cca-a51c-7acb5998cb12" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1488.463031] env[67015]: WARNING oslo_vmware.rw_handles [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1488.463031] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1488.463031] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1488.463031] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 1488.463031] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1488.463031] env[67015]: ERROR oslo_vmware.rw_handles response.begin()
[ 1488.463031] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1488.463031] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 1488.463031] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1488.463031] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 1488.463031] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1488.463031] env[67015]: ERROR oslo_vmware.rw_handles
[ 1488.463769] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/f61b0d39-a836-4093-9b24-074f805bde49/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1488.465293] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1488.465536] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Copying Virtual Disk [datastore2] vmware_temp/f61b0d39-a836-4093-9b24-074f805bde49/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/f61b0d39-a836-4093-9b24-074f805bde49/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1488.465816] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8a7eb942-c0c7-49fb-b9aa-e216081587cc {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1488.474132] env[67015]: DEBUG oslo_vmware.api [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Waiting for the task: (returnval){
[ 1488.474132] env[67015]: value = "task-3114513"
[ 1488.474132] env[67015]: _type = "Task"
[ 1488.474132] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1488.482095] env[67015]: DEBUG oslo_vmware.api [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Task: {'id': task-3114513, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1488.532015] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1488.984651] env[67015]: DEBUG oslo_vmware.exceptions [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Fault InvalidArgument not matched. {{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1488.984887] env[67015]: DEBUG oslo_concurrency.lockutils [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1488.985472] env[67015]: ERROR nova.compute.manager [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1488.985472] env[67015]: Faults: ['InvalidArgument']
[ 1488.985472] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Traceback (most recent call last):
[ 1488.985472] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1488.985472] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] yield resources
[ 1488.985472] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1488.985472] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] self.driver.spawn(context, instance, image_meta,
[ 1488.985472] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1488.985472] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1488.985472] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1488.985472] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] self._fetch_image_if_missing(context, vi)
[ 1488.985472] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1488.985858] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] image_cache(vi, tmp_image_ds_loc)
[ 1488.985858] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1488.985858] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] vm_util.copy_virtual_disk(
[ 1488.985858] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1488.985858] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] session._wait_for_task(vmdk_copy_task)
[ 1488.985858] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1488.985858] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] return self.wait_for_task(task_ref)
[ 1488.985858] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1488.985858] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] return evt.wait()
[ 1488.985858] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1488.985858] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] result = hub.switch()
[ 1488.985858] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1488.985858] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] return self.greenlet.switch()
[ 1488.986374] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1488.986374] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] self.f(*self.args, **self.kw)
[ 1488.986374] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1488.986374] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] raise exceptions.translate_fault(task_info.error)
[ 1488.986374] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1488.986374] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Faults: ['InvalidArgument']
[ 1488.986374] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee]
[ 1488.986374] env[67015]: INFO nova.compute.manager [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Terminating instance
[ 1488.987504] env[67015]: DEBUG oslo_concurrency.lockutils [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1488.987709] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1488.987962] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7c96890-e46b-4328-b07e-2090b7434682 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1488.990189] env[67015]: DEBUG nova.compute.manager [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1488.990385] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1488.991108] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8933b549-3f5a-4f75-805c-f305b9281e8b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1488.997923] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1488.998113] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-34abdbd8-72b7-4b6a-9c32-646b43eb54a8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1489.000313] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1489.000489] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1489.001438] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81503e3b-a66a-41f4-8219-b94491d8a0b2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1489.005991] env[67015]: DEBUG oslo_vmware.api [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Waiting for the task: (returnval){
[ 1489.005991] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52afb047-8d49-be86-1b27-ea1dddd8cd7b"
[ 1489.005991] env[67015]: _type = "Task"
[ 1489.005991] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1489.012980] env[67015]: DEBUG oslo_vmware.api [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52afb047-8d49-be86-1b27-ea1dddd8cd7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1489.159438] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1489.159699] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1489.159826] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Deleting the datastore file [datastore2] 8c919afe-37b6-47f0-b939-d9df5800d7ee {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1489.160166] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3ae50df0-825b-4b1d-b462-15096c91ba30 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1489.166216] env[67015]: DEBUG oslo_vmware.api [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Waiting for the task: (returnval){
[ 1489.166216] env[67015]: value = "task-3114515"
[ 1489.166216] env[67015]: _type = "Task"
[ 1489.166216] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1489.173661] env[67015]: DEBUG oslo_vmware.api [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Task: {'id': task-3114515, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1489.510834] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1489.516764] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1489.517115] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Creating directory with path [datastore2] vmware_temp/854807c5-f3bc-4f05-b2c6-5822e9384c50/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1489.517360] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f0e026ea-023f-4011-941e-d5abd8a2e50f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1489.528528] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Created directory with path [datastore2] vmware_temp/854807c5-f3bc-4f05-b2c6-5822e9384c50/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1489.528726] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Fetch image to [datastore2] vmware_temp/854807c5-f3bc-4f05-b2c6-5822e9384c50/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1489.528898] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/854807c5-f3bc-4f05-b2c6-5822e9384c50/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1489.529654] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52fbb890-622a-41f0-be6c-038867dea0e5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1489.535824] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33f326ab-6777-4ff6-ada1-51e31fcdb1eb {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1489.544655] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx
with opID=oslo.vmware-98c2ee98-18d8-4591-8f50-e353d632e72f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.576359] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-586a840f-2160-4f6c-9c49-93986d924b09 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.581807] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ecbc6426-aa5d-40a5-888e-8ce2f95a71b7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.604762] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1489.676236] env[67015]: DEBUG oslo_vmware.api [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Task: {'id': task-3114515, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076112} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.676491] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1489.676676] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1489.676867] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1489.677132] env[67015]: INFO nova.compute.manager [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Took 0.69 seconds to destroy the instance on the hypervisor. 
[ 1489.679237] env[67015]: DEBUG nova.compute.claims [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1489.679421] env[67015]: DEBUG oslo_concurrency.lockutils [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1489.679625] env[67015]: DEBUG oslo_concurrency.lockutils [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1489.739244] env[67015]: DEBUG oslo_concurrency.lockutils [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1489.740146] env[67015]: ERROR nova.compute.manager [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982. 
[ 1489.740146] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Traceback (most recent call last): [ 1489.740146] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1489.740146] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1489.740146] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1489.740146] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] result = getattr(controller, method)(*args, **kwargs) [ 1489.740146] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1489.740146] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return self._get(image_id) [ 1489.740146] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1489.740146] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1489.740146] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1489.740617] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] resp, body = self.http_client.get(url, headers=header) [ 1489.740617] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1489.740617] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return self.request(url, 'GET', **kwargs) [ 1489.740617] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1489.740617] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return self._handle_response(resp) [ 1489.740617] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1489.740617] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] raise exc.from_response(resp, resp.content) [ 1489.740617] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1489.740617] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] [ 1489.740617] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] During handling of the above exception, another exception occurred: [ 1489.740617] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] [ 1489.740617] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Traceback (most recent call last): [ 1489.741150] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1489.741150] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] yield resources [ 1489.741150] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1489.741150] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] self.driver.spawn(context, instance, image_meta, [ 1489.741150] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1489.741150] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1489.741150] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1489.741150] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] self._fetch_image_if_missing(context, vi) [ 1489.741150] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1489.741150] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] image_fetch(context, vi, tmp_image_ds_loc) [ 1489.741150] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1489.741150] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] images.fetch_image( [ 1489.741150] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1489.741500] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] metadata = IMAGE_API.get(context, image_ref) [ 1489.741500] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1489.741500] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return session.show(context, image_id, [ 1489.741500] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1489.741500] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] _reraise_translated_image_exception(image_id) [ 1489.741500] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1489.741500] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] raise new_exc.with_traceback(exc_trace) [ 1489.741500] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1489.741500] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1489.741500] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1489.741500] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] result = getattr(controller, method)(*args, **kwargs) [ 1489.741500] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1489.741500] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return self._get(image_id) [ 1489.741815] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1489.741815] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1489.741815] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1489.741815] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] resp, body = self.http_client.get(url, headers=header) [ 1489.741815] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1489.741815] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return self.request(url, 'GET', **kwargs) [ 1489.741815] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1489.741815] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return self._handle_response(resp) [ 1489.741815] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1489.741815] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] raise exc.from_response(resp, resp.content) [ 1489.741815] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] nova.exception.ImageNotAuthorized: Not authorized for image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982. 
[ 1489.741815] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] [ 1489.742099] env[67015]: INFO nova.compute.manager [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Terminating instance [ 1489.742099] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1489.742298] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1489.742556] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e3781997-96ad-43ef-b96e-67080665df74 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.745406] env[67015]: DEBUG nova.compute.manager [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1489.745620] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1489.746701] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c505d5-fda3-4603-8a74-2d6b374ad6a4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.755803] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1489.756030] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8e96c3f1-c133-45eb-9a93-c5726bc5164d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.758186] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1489.758361] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 
tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1489.759281] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd9feea1-50c8-43d5-9353-53cc031e19df {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.764760] env[67015]: DEBUG oslo_vmware.api [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Waiting for the task: (returnval){ [ 1489.764760] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]5276dd2a-27ff-b1ce-5e6c-f13ae34d7da5" [ 1489.764760] env[67015]: _type = "Task" [ 1489.764760] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.771824] env[67015]: DEBUG oslo_vmware.api [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]5276dd2a-27ff-b1ce-5e6c-f13ae34d7da5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.811201] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1489.811485] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1489.811596] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Deleting the datastore file [datastore2] 437b21d5-932d-4216-b7f7-17c6eab2665f {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1489.811850] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dd578274-7409-4f0a-bc7e-7b1e463a27a7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.820575] env[67015]: DEBUG oslo_vmware.api [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Waiting for the task: (returnval){ [ 1489.820575] env[67015]: value = "task-3114517" [ 1489.820575] env[67015]: _type = "Task" [ 1489.820575] env[67015]: } to complete. 
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.828039] env[67015]: DEBUG oslo_vmware.api [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Task: {'id': task-3114517, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.924657] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfa20ba3-a40a-46e0-b73c-274523a0b925 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.931694] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e2fcf5-f7bc-44cd-afa2-f8f0c7ee757b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.961632] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3510f7b0-6a75-44ea-af08-499b8a527203 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.968381] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c8764f-1f99-4954-bb85-b7c147e4b9ec {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.980871] env[67015]: DEBUG nova.compute.provider_tree [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1489.990030] env[67015]: DEBUG nova.scheduler.client.report [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1490.005589] env[67015]: DEBUG oslo_concurrency.lockutils [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.326s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1490.006152] env[67015]: ERROR nova.compute.manager [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1490.006152] env[67015]: Faults: 
['InvalidArgument'] [ 1490.006152] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Traceback (most recent call last): [ 1490.006152] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1490.006152] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] self.driver.spawn(context, instance, image_meta, [ 1490.006152] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1490.006152] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1490.006152] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1490.006152] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] self._fetch_image_if_missing(context, vi) [ 1490.006152] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1490.006152] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] image_cache(vi, tmp_image_ds_loc) [ 1490.006152] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1490.006531] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] vm_util.copy_virtual_disk( [ 1490.006531] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1490.006531] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] session._wait_for_task(vmdk_copy_task) [ 1490.006531] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1490.006531] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] return self.wait_for_task(task_ref) [ 1490.006531] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1490.006531] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] return evt.wait() [ 1490.006531] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1490.006531] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] result = hub.switch() [ 1490.006531] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1490.006531] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] return self.greenlet.switch() [ 1490.006531] env[67015]: ERROR nova.compute.manager [instance: 
8c919afe-37b6-47f0-b939-d9df5800d7ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1490.006531] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] self.f(*self.args, **self.kw) [ 1490.006905] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1490.006905] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] raise exceptions.translate_fault(task_info.error) [ 1490.006905] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1490.006905] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Faults: ['InvalidArgument'] [ 1490.006905] env[67015]: ERROR nova.compute.manager [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] [ 1490.007076] env[67015]: DEBUG nova.compute.utils [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1490.008804] env[67015]: DEBUG nova.compute.manager [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Build of instance 8c919afe-37b6-47f0-b939-d9df5800d7ee was re-scheduled: A specified parameter was not correct: fileType [ 1490.008804] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1490.009229] env[67015]: DEBUG nova.compute.manager [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1490.009409] env[67015]: DEBUG nova.compute.manager [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1490.009581] env[67015]: DEBUG nova.compute.manager [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1490.009742] env[67015]: DEBUG nova.network.neutron [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1490.275814] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1490.276079] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Creating directory with path [datastore2] vmware_temp/81ef7a88-23a5-40d5-bb44-795beece2244/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1490.276318] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f1c800f-b1c0-4df1-abfa-cd3105c2f2ca {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.288303] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Created directory with path [datastore2] vmware_temp/81ef7a88-23a5-40d5-bb44-795beece2244/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1490.288502] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Fetch image to [datastore2] vmware_temp/81ef7a88-23a5-40d5-bb44-795beece2244/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1490.288672] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/81ef7a88-23a5-40d5-bb44-795beece2244/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1490.289603] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d48327-3533-4020-b765-d1550b42a462 {{(pid=67015) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.296726] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f98200-4d58-4678-90c2-5d19dd07506d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.305791] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-556a7566-0259-4b1a-881f-bba33f34b7ec {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.341996] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb4493d-9d50-44cd-bd4f-29d5e8e747a8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.351686] env[67015]: DEBUG oslo_vmware.api [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Task: {'id': task-3114517, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.085473} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.352764] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1490.352892] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1490.353809] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1490.353809] env[67015]: INFO nova.compute.manager [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1490.355753] env[67015]: DEBUG nova.compute.claims [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1490.355923] env[67015]: DEBUG oslo_concurrency.lockutils [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1490.356155] env[67015]: DEBUG oslo_concurrency.lockutils [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1490.358676] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1c1a64ec-69aa-4e4b-993e-3b90faf26b1a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.361450] env[67015]: DEBUG nova.network.neutron [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1490.372367] env[67015]: INFO nova.compute.manager [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Took 0.36 seconds to deallocate network for instance. [ 1490.382258] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1490.448952] env[67015]: DEBUG oslo_vmware.rw_handles [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/81ef7a88-23a5-40d5-bb44-795beece2244/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1490.513767] env[67015]: DEBUG oslo_vmware.rw_handles [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Completed reading data from the image iterator. 
{{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1490.514064] env[67015]: DEBUG oslo_vmware.rw_handles [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/81ef7a88-23a5-40d5-bb44-795beece2244/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1490.514386] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1490.530121] env[67015]: INFO nova.scheduler.client.report [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Deleted allocations for instance 8c919afe-37b6-47f0-b939-d9df5800d7ee [ 1490.555107] env[67015]: DEBUG oslo_concurrency.lockutils [None req-47133083-9b5e-49d1-b011-758c9bae905b tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Lock "8c919afe-37b6-47f0-b939-d9df5800d7ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 609.798s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1490.556232] env[67015]: DEBUG oslo_concurrency.lockutils [None req-327b186f-13cc-4295-af61-2bbca40e6933 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Lock "8c919afe-37b6-47f0-b939-d9df5800d7ee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 412.615s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1490.556461] env[67015]: DEBUG oslo_concurrency.lockutils [None req-327b186f-13cc-4295-af61-2bbca40e6933 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquiring lock "8c919afe-37b6-47f0-b939-d9df5800d7ee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1490.556668] env[67015]: DEBUG oslo_concurrency.lockutils [None req-327b186f-13cc-4295-af61-2bbca40e6933 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Lock "8c919afe-37b6-47f0-b939-d9df5800d7ee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1490.556843] env[67015]: DEBUG oslo_concurrency.lockutils [None req-327b186f-13cc-4295-af61-2bbca40e6933 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Lock "8c919afe-37b6-47f0-b939-d9df5800d7ee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1490.561138] env[67015]: INFO nova.compute.manager [None req-327b186f-13cc-4295-af61-2bbca40e6933 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Terminating instance [ 1490.562900] env[67015]: DEBUG nova.compute.manager [None req-327b186f-13cc-4295-af61-2bbca40e6933 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1490.563107] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-327b186f-13cc-4295-af61-2bbca40e6933 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1490.563734] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cdbbf61b-7b7c-4718-9641-6c063364d729 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.573016] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1818b062-b0b4-46fe-8946-b1d6c87591e0 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.583122] env[67015]: DEBUG nova.compute.manager [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: b7b12357-09c4-402f-bf1c-f8872d86d17b] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1490.606184] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-327b186f-13cc-4295-af61-2bbca40e6933 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8c919afe-37b6-47f0-b939-d9df5800d7ee could not be found. [ 1490.606289] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-327b186f-13cc-4295-af61-2bbca40e6933 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1490.606410] env[67015]: INFO nova.compute.manager [None req-327b186f-13cc-4295-af61-2bbca40e6933 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1490.606664] env[67015]: DEBUG oslo.service.loopingcall [None req-327b186f-13cc-4295-af61-2bbca40e6933 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1490.609311] env[67015]: DEBUG nova.compute.manager [-] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1490.609427] env[67015]: DEBUG nova.network.neutron [-] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1490.611828] env[67015]: DEBUG nova.compute.manager [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: b7b12357-09c4-402f-bf1c-f8872d86d17b] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1490.642793] env[67015]: DEBUG oslo_concurrency.lockutils [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Lock "b7b12357-09c4-402f-bf1c-f8872d86d17b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.869s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1490.644437] env[67015]: DEBUG nova.network.neutron [-] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1490.652510] env[67015]: INFO nova.compute.manager [-] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] Took 0.04 seconds to deallocate network for instance. [ 1490.654504] env[67015]: DEBUG nova.compute.manager [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Starting instance... 
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1490.705318] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1490.727476] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d0e52fd-9e18-4830-9e8c-06a1ce22e037 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.735570] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91a7a73-0329-4de9-9999-4221178e2459 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.767386] env[67015]: DEBUG oslo_concurrency.lockutils [None req-327b186f-13cc-4295-af61-2bbca40e6933 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Lock "8c919afe-37b6-47f0-b939-d9df5800d7ee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.211s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1490.769013] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7df6512c-7716-440c-83e4-dba197ac8772 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.771454] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "8c919afe-37b6-47f0-b939-d9df5800d7ee" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 319.956s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1490.771651] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 8c919afe-37b6-47f0-b939-d9df5800d7ee] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1490.771852] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "8c919afe-37b6-47f0-b939-d9df5800d7ee" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1490.778056] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a0cadf5-d885-494a-b6d0-0ed77ce1124a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.792385] env[67015]: DEBUG nova.compute.provider_tree [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1490.805271] env[67015]: DEBUG nova.scheduler.client.report [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1490.818343] env[67015]: DEBUG oslo_concurrency.lockutils [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.462s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1490.819114] env[67015]: ERROR nova.compute.manager [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982. 
[ 1490.819114] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Traceback (most recent call last): [ 1490.819114] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1490.819114] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1490.819114] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1490.819114] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] result = getattr(controller, method)(*args, **kwargs) [ 1490.819114] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1490.819114] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return self._get(image_id) [ 1490.819114] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1490.819114] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1490.819114] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1490.819413] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] resp, body = self.http_client.get(url, headers=header) [ 1490.819413] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1490.819413] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return self.request(url, 'GET', **kwargs) [ 1490.819413] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1490.819413] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return self._handle_response(resp) [ 1490.819413] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1490.819413] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] raise exc.from_response(resp, resp.content) [ 1490.819413] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1490.819413] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] [ 1490.819413] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] During handling of the above exception, another exception occurred: [ 1490.819413] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] [ 1490.819413] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Traceback (most recent call last): [ 1490.819693] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1490.819693] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] self.driver.spawn(context, instance, image_meta, [ 1490.819693] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1490.819693] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1490.819693] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1490.819693] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] self._fetch_image_if_missing(context, vi) [ 1490.819693] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1490.819693] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] image_fetch(context, vi, tmp_image_ds_loc) [ 1490.819693] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1490.819693] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] images.fetch_image( [ 1490.819693] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1490.819693] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] metadata = IMAGE_API.get(context, image_ref) [ 1490.819693] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1490.820017] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return session.show(context, image_id, [ 1490.820017] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1490.820017] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] _reraise_translated_image_exception(image_id) [ 1490.820017] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1490.820017] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] raise new_exc.with_traceback(exc_trace) [ 1490.820017] env[67015]: ERROR nova.compute.manager [instance: 
437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1490.820017] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1490.820017] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1490.820017] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] result = getattr(controller, method)(*args, **kwargs) [ 1490.820017] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1490.820017] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return self._get(image_id) [ 1490.820017] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1490.820017] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1490.820329] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1490.820329] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] resp, body = self.http_client.get(url, headers=header) [ 1490.820329] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1490.820329] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return self.request(url, 'GET', **kwargs) [ 1490.820329] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1490.820329] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return self._handle_response(resp) [ 1490.820329] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1490.820329] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] raise exc.from_response(resp, resp.content) [ 1490.820329] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] nova.exception.ImageNotAuthorized: Not authorized for image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982. [ 1490.820329] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] [ 1490.820568] env[67015]: DEBUG nova.compute.utils [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Not authorized for image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982. 
{{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1490.820891] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.116s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1490.822256] env[67015]: INFO nova.compute.claims [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1490.824727] env[67015]: DEBUG nova.compute.manager [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Build of instance 437b21d5-932d-4216-b7f7-17c6eab2665f was re-scheduled: Not authorized for image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1490.825188] env[67015]: DEBUG nova.compute.manager [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1490.825363] env[67015]: DEBUG nova.compute.manager [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1490.825519] env[67015]: DEBUG nova.compute.manager [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1490.825679] env[67015]: DEBUG nova.network.neutron [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1490.935804] env[67015]: DEBUG neutronclient.v2_0.client [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=67015) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1490.937087] env[67015]: ERROR nova.compute.manager [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 1490.937087] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Traceback (most recent call last): [ 1490.937087] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1490.937087] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1490.937087] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1490.937087] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] result = getattr(controller, method)(*args, **kwargs) [ 1490.937087] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1490.937087] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return self._get(image_id) [ 1490.937087] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1490.937087] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1490.937087] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1490.937424] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] resp, body = self.http_client.get(url, headers=header) [ 1490.937424] env[67015]: ERROR nova.compute.manager [instance: 
437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1490.937424] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return self.request(url, 'GET', **kwargs) [ 1490.937424] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1490.937424] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return self._handle_response(resp) [ 1490.937424] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1490.937424] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] raise exc.from_response(resp, resp.content) [ 1490.937424] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1490.937424] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] [ 1490.937424] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] During handling of the above exception, another exception occurred: [ 1490.937424] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] [ 1490.937424] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Traceback (most recent call last): [ 1490.937748] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1490.937748] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] self.driver.spawn(context, instance, image_meta, [ 1490.937748] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1490.937748] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1490.937748] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1490.937748] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] self._fetch_image_if_missing(context, vi) [ 1490.937748] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1490.937748] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] image_fetch(context, vi, tmp_image_ds_loc) [ 1490.937748] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1490.937748] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] 
images.fetch_image( [ 1490.937748] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1490.937748] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] metadata = IMAGE_API.get(context, image_ref) [ 1490.937748] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1490.938117] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return session.show(context, image_id, [ 1490.938117] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1490.938117] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] _reraise_translated_image_exception(image_id) [ 1490.938117] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1490.938117] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] raise new_exc.with_traceback(exc_trace) [ 1490.938117] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1490.938117] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1490.938117] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1490.938117] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] result = getattr(controller, method)(*args, **kwargs) [ 1490.938117] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1490.938117] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return self._get(image_id) [ 1490.938117] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1490.938117] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1490.938521] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1490.938521] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] resp, body = self.http_client.get(url, headers=header) [ 1490.938521] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1490.938521] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return self.request(url, 'GET', **kwargs) [ 1490.938521] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1490.938521] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return self._handle_response(resp) [ 1490.938521] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1490.938521] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] raise exc.from_response(resp, resp.content) [ 1490.938521] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] nova.exception.ImageNotAuthorized: Not authorized for image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982. [ 1490.938521] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] [ 1490.938521] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] During handling of the above exception, another exception occurred: [ 1490.938521] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] [ 1490.938521] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Traceback (most recent call last): [ 1490.938941] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/compute/manager.py", line 2447, in _do_build_and_run_instance [ 1490.938941] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] self._build_and_run_instance(context, instance, image, [ 1490.938941] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/compute/manager.py", line 2739, in _build_and_run_instance [ 1490.938941] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] raise exception.RescheduledException( [ 1490.938941] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] nova.exception.RescheduledException: Build of instance 437b21d5-932d-4216-b7f7-17c6eab2665f was re-scheduled: Not authorized for image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982. 
[ 1490.938941] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] [ 1490.938941] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] During handling of the above exception, another exception occurred: [ 1490.938941] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] [ 1490.938941] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Traceback (most recent call last): [ 1490.938941] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1490.938941] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] ret = obj(*args, **kwargs) [ 1490.938941] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1490.938941] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] exception_handler_v20(status_code, error_body) [ 1490.939298] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1490.939298] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] raise client_exc(message=error_message, [ 1490.939298] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1490.939298] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Neutron server returns request_ids: ['req-2906aa6f-a5a3-4281-8b5f-99b6d266d611'] [ 1490.939298] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] [ 1490.939298] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] During handling of the above exception, another exception occurred: [ 1490.939298] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] [ 1490.939298] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Traceback (most recent call last): [ 1490.939298] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/compute/manager.py", line 3036, in _cleanup_allocated_networks [ 1490.939298] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] self._deallocate_network(context, instance, requested_networks) [ 1490.939298] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1490.939298] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] self.network_api.deallocate_for_instance( [ 1490.939298] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1490.939653] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] data = neutron.list_ports(**search_opts) [ 
1490.939653] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1490.939653] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] ret = obj(*args, **kwargs) [ 1490.939653] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1490.939653] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return self.list('ports', self.ports_path, retrieve_all, [ 1490.939653] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1490.939653] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] ret = obj(*args, **kwargs) [ 1490.939653] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1490.939653] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] for r in self._pagination(collection, path, **params): [ 1490.939653] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1490.939653] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] res = self.get(path, params=params) [ 1490.939653] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1490.939653] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] ret = obj(*args, **kwargs) [ 1490.940023] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1490.940023] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return self.retry_request("GET", action, body=body, [ 1490.940023] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1490.940023] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] ret = obj(*args, **kwargs) [ 1490.940023] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1490.940023] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return self.do_request(method, action, body=body, [ 1490.940023] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1490.940023] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] ret = obj(*args, **kwargs) [ 1490.940023] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 
1490.940023] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] self._handle_fault_response(status_code, replybody, resp) [ 1490.940023] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1490.940023] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] raise exception.Unauthorized() [ 1490.940023] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] nova.exception.Unauthorized: Not authorized. [ 1490.940390] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] [ 1490.996135] env[67015]: INFO nova.scheduler.client.report [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Deleted allocations for instance 437b21d5-932d-4216-b7f7-17c6eab2665f [ 1491.015460] env[67015]: DEBUG oslo_concurrency.lockutils [None req-78a7c1e1-2eff-4b8e-b810-b28630f30b59 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Lock "437b21d5-932d-4216-b7f7-17c6eab2665f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 581.780s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.016826] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff6acf81-42ec-4ead-84ba-7d9bff826b29 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Lock "437b21d5-932d-4216-b7f7-17c6eab2665f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 385.714s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.017095] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff6acf81-42ec-4ead-84ba-7d9bff826b29 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquiring lock "437b21d5-932d-4216-b7f7-17c6eab2665f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.017329] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff6acf81-42ec-4ead-84ba-7d9bff826b29 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Lock "437b21d5-932d-4216-b7f7-17c6eab2665f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.017501] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff6acf81-42ec-4ead-84ba-7d9bff826b29 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Lock "437b21d5-932d-4216-b7f7-17c6eab2665f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.019484] env[67015]: INFO nova.compute.manager [None req-ff6acf81-42ec-4ead-84ba-7d9bff826b29 tempest-DeleteServersAdminTestJSON-229028277 
tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Terminating instance [ 1491.021102] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff6acf81-42ec-4ead-84ba-7d9bff826b29 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquiring lock "refresh_cache-437b21d5-932d-4216-b7f7-17c6eab2665f" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1491.021216] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff6acf81-42ec-4ead-84ba-7d9bff826b29 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Acquired lock "refresh_cache-437b21d5-932d-4216-b7f7-17c6eab2665f" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1491.021384] env[67015]: DEBUG nova.network.neutron [None req-ff6acf81-42ec-4ead-84ba-7d9bff826b29 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1491.029612] env[67015]: DEBUG nova.compute.manager [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1491.061276] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b8e89ca-3da9-4dc9-9650-72c311be4901 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.069085] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d38b239-9886-4b87-9efd-8cc3217162ee {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.105295] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4782c1df-567e-4b92-bd23-8cac80ceb52f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.112490] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ecbb5e-82e3-43c4-a614-92449f6b2d44 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.117158] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.126950] env[67015]: DEBUG nova.compute.provider_tree [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
1491.137256] env[67015]: DEBUG nova.scheduler.client.report [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1491.150224] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.329s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.150684] env[67015]: DEBUG nova.compute.manager [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1491.153010] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.036s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.154445] env[67015]: INFO nova.compute.claims [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1491.182286] env[67015]: DEBUG nova.compute.utils [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1491.183644] env[67015]: DEBUG nova.compute.manager [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Allocating IP information in the background. 
{{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1491.184679] env[67015]: DEBUG nova.network.neutron [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1491.194432] env[67015]: DEBUG nova.compute.manager [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1491.246300] env[67015]: DEBUG nova.policy [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '31948dd4263c489487c7ee70bfe5f90a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a6ae460357cb49bebc05c4d993d2fddf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 1491.263124] env[67015]: DEBUG nova.compute.manager [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Start spawning the instance on the hypervisor. 
{{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1491.291781] env[67015]: DEBUG nova.virt.hardware [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=<?>,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-20T08:16:53Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1491.292033] env[67015]: DEBUG nova.virt.hardware [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1491.292199] env[67015]: DEBUG nova.virt.hardware [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1491.292918] env[67015]: DEBUG nova.virt.hardware [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1491.292918] env[67015]: DEBUG nova.virt.hardware [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1491.292918] env[67015]: DEBUG nova.virt.hardware [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1491.292918] env[67015]: DEBUG nova.virt.hardware [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1491.293235] env[67015]: DEBUG nova.virt.hardware [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1491.293235] env[67015]: DEBUG nova.virt.hardware [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1491.293333] env[67015]: DEBUG nova.virt.hardware [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1491.293504] env[67015]: DEBUG nova.virt.hardware [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1491.294364] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b5d1894-127c-4756-a5a0-f7a03423c627 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.303088] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b878a6-4046-40fd-a1bb-58fd4eae8342 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.324186] env[67015]: DEBUG nova.network.neutron [None req-ff6acf81-42ec-4ead-84ba-7d9bff826b29 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Updating instance_info_cache with network_info: [{"id": "e6c9cfe9-567f-49bb-ac9b-9cfa97699ac8", "address": "fa:16:3e:ad:78:aa", "network": {"id": "7451a549-058d-44bf-acca-7bd945ac5def", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.78", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "482f98aba88c4103b6a8d7c7ab5d030d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6c9cfe9-56", "ovs_interfaceid": "e6c9cfe9-567f-49bb-ac9b-9cfa97699ac8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1491.335316] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff6acf81-42ec-4ead-84ba-7d9bff826b29 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Releasing lock "refresh_cache-437b21d5-932d-4216-b7f7-17c6eab2665f" {{(pid=67015) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1491.335723] env[67015]: DEBUG nova.compute.manager [None req-ff6acf81-42ec-4ead-84ba-7d9bff826b29 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1491.335914] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ff6acf81-42ec-4ead-84ba-7d9bff826b29 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1491.336436] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-970d6cff-3cb7-475e-8866-2a0d7e4f9995 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.347537] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e044ec8-fe62-4c86-961a-26b3e5d5f7d2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.378372] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-ff6acf81-42ec-4ead-84ba-7d9bff826b29 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 437b21d5-932d-4216-b7f7-17c6eab2665f could not be found. [ 1491.378582] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ff6acf81-42ec-4ead-84ba-7d9bff826b29 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1491.378764] env[67015]: INFO nova.compute.manager [None req-ff6acf81-42ec-4ead-84ba-7d9bff826b29 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1491.379055] env[67015]: DEBUG oslo.service.loopingcall [None req-ff6acf81-42ec-4ead-84ba-7d9bff826b29 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1491.381527] env[67015]: DEBUG nova.compute.manager [-] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1491.381628] env[67015]: DEBUG nova.network.neutron [-] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1491.419634] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c02b0482-7044-4042-a076-ecfc3cb2c329 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.427443] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53dc9c08-bfde-43e2-b109-30efdbb51108 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.466788] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c8a2e89-737a-487c-8a1e-96ce9a497cdb {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.475612] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b772508-f8d6-4b4b-aeaa-16935719ddb5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.479275] env[67015]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=67015) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1491.479500] env[67015]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1491.480010] env[67015]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1491.480010] env[67015]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1491.480010] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1491.480010] env[67015]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1491.480010] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1491.480010] env[67015]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1491.480010] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1491.480010] env[67015]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1491.480010] env[67015]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1491.480010] env[67015]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-47acf149-64dc-4194-b364-e819f86e1022'] [ 1491.480010] env[67015]: ERROR oslo.service.loopingcall [ 1491.480010] env[67015]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1491.480010] env[67015]: ERROR oslo.service.loopingcall [ 1491.480010] env[67015]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1491.480010] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1491.480010] env[67015]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1491.480501] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1491.480501] env[67015]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1491.480501] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1491.480501] env[67015]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1491.480501] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1491.480501] env[67015]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1491.480501] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1491.480501] env[67015]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1491.480501] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1491.480501] env[67015]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1491.480501] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1491.480501] env[67015]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1491.480501] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1491.480501] env[67015]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1491.480501] env[67015]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1491.480501] env[67015]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1491.480501] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1491.480501] env[67015]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1491.481038] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1491.481038] env[67015]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1491.481038] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1491.481038] env[67015]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1491.481038] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1491.481038] env[67015]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1491.481038] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1491.481038] env[67015]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1491.481038] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1491.481038] env[67015]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1491.481038] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1491.481038] env[67015]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1491.481038] env[67015]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1491.481038] env[67015]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1491.481038] env[67015]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1491.481038] env[67015]: ERROR oslo.service.loopingcall [ 1491.481476] env[67015]: ERROR nova.compute.manager [None req-ff6acf81-42ec-4ead-84ba-7d9bff826b29 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
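
The traceback shows the deallocation running under oslo_service's RetryDecorator, which drives a dynamic looping call. A toy sketch of that retry pattern with invented names and counts; the key point is that only exception types listed in `exceptions` are retried, which is why the Unauthorized escapes on the first attempt here:

from oslo_service import loopingcall

class FakeUnauthorized(Exception):
    """Stands in for the Unauthorized raised in the log."""

@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                            max_sleep_time=5, exceptions=(ConnectionError,))
def deallocate():
    # FakeUnauthorized is not in `exceptions`, so RetryDecorator re-raises
    # immediately instead of retrying, matching the single failure above.
    raise FakeUnauthorized('401')

deallocate()
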
[ 1491.495689] env[67015]: DEBUG nova.compute.provider_tree [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1491.506923] env[67015]: DEBUG nova.scheduler.client.report [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1491.512369] env[67015]: ERROR nova.compute.manager [None req-ff6acf81-42ec-4ead-84ba-7d9bff826b29 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1491.512369] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Traceback (most recent call last): [ 1491.512369] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1491.512369] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] ret = obj(*args, **kwargs) [ 1491.512369] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1491.512369] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] exception_handler_v20(status_code, error_body) [ 1491.512369] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1491.512369] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] raise client_exc(message=error_message, [ 1491.512369] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1491.512369] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Neutron server returns request_ids: ['req-47acf149-64dc-4194-b364-e819f86e1022'] [ 1491.512369] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] [ 1491.512714] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] During handling of the above exception, another exception occurred: [ 1491.512714] env[67015]: ERROR nova.compute.manager [instance: 
437b21d5-932d-4216-b7f7-17c6eab2665f] [ 1491.512714] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Traceback (most recent call last): [ 1491.512714] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/compute/manager.py", line 3341, in do_terminate_instance [ 1491.512714] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] self._delete_instance(context, instance, bdms) [ 1491.512714] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1491.512714] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] self._shutdown_instance(context, instance, bdms) [ 1491.512714] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1491.512714] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] self._try_deallocate_network(context, instance, requested_networks) [ 1491.512714] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1491.512714] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] with excutils.save_and_reraise_exception(): [ 1491.512714] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1491.512714] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] self.force_reraise() [ 1491.513097] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1491.513097] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] raise self.value [ 1491.513097] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1491.513097] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] _deallocate_network_with_retries() [ 1491.513097] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1491.513097] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return evt.wait() [ 1491.513097] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1491.513097] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] result = hub.switch() [ 1491.513097] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1491.513097] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return self.greenlet.switch() [ 1491.513097] env[67015]: ERROR 
nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1491.513097] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] result = func(*self.args, **self.kw) [ 1491.513460] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1491.513460] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] result = f(*args, **kwargs) [ 1491.513460] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1491.513460] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] self._deallocate_network( [ 1491.513460] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1491.513460] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] self.network_api.deallocate_for_instance( [ 1491.513460] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1491.513460] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] data = neutron.list_ports(**search_opts) [ 1491.513460] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1491.513460] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] ret = obj(*args, **kwargs) [ 1491.513460] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1491.513460] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return self.list('ports', self.ports_path, retrieve_all, [ 1491.513460] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1491.513766] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] ret = obj(*args, **kwargs) [ 1491.513766] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1491.513766] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] for r in self._pagination(collection, path, **params): [ 1491.513766] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1491.513766] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] res = self.get(path, params=params) [ 1491.513766] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1491.513766] env[67015]: ERROR 
nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] ret = obj(*args, **kwargs) [ 1491.513766] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1491.513766] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return self.retry_request("GET", action, body=body, [ 1491.513766] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1491.513766] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] ret = obj(*args, **kwargs) [ 1491.513766] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1491.513766] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] return self.do_request(method, action, body=body, [ 1491.514548] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1491.514548] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] ret = obj(*args, **kwargs) [ 1491.514548] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1491.514548] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] self._handle_fault_response(status_code, replybody, resp) [ 1491.514548] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1491.514548] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1491.514548] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
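
Several frames above pass through oslo_utils.excutils.save_and_reraise_exception / force_reraise. A minimal sketch of that idiom (the cleanup body is invented): run cleanup while an exception is in flight, then re-raise the original with its traceback intact on exiting the context manager:

from oslo_utils import excutils

try:
    raise RuntimeError('deallocate failed')  # stand-in for the 401 path
except RuntimeError:
    with excutils.save_and_reraise_exception():
        # Cleanup/logging runs here; on exit the saved exception is
        # re-raised (the force_reraise frames in the traceback above).
        print('setting instance vm_state to ERROR (illustrative)')
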
[ 1491.514548] env[67015]: ERROR nova.compute.manager [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] [ 1491.516148] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1491.516393] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1491.520543] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.368s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.521018] env[67015]: DEBUG nova.compute.manager [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1491.541191] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff6acf81-42ec-4ead-84ba-7d9bff826b29 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Lock "437b21d5-932d-4216-b7f7-17c6eab2665f" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.524s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.542752] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "437b21d5-932d-4216-b7f7-17c6eab2665f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 320.727s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.542944] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] During sync_power_state the instance has a pending task (deleting). Skip. 
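
The "acquired ... waited 320.727s" / "released ... held" pairs above are oslo_concurrency's named locks: the power-state sync had been queued behind the terminate path on the same instance UUID. A minimal sketch of the pattern (lock name taken from the log, body invented):

from oslo_concurrency import lockutils

@lockutils.synchronized('437b21d5-932d-4216-b7f7-17c6eab2665f')
def query_driver_power_state_and_sync():
    # Serialized per instance: this waiter blocked ~320s until
    # do_terminate_instance released the same named lock.
    pass

query_driver_power_state_and_sync()
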
[ 1491.544076] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "437b21d5-932d-4216-b7f7-17c6eab2665f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.567374] env[67015]: DEBUG nova.compute.utils [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1491.568771] env[67015]: DEBUG nova.compute.manager [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1491.568986] env[67015]: DEBUG nova.network.neutron [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1491.581270] env[67015]: DEBUG nova.compute.manager [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1491.599938] env[67015]: DEBUG nova.network.neutron [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Successfully created port: 1260e714-732e-4fd3-b31a-c5c923e65674 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1491.605875] env[67015]: INFO nova.compute.manager [None req-ff6acf81-42ec-4ead-84ba-7d9bff826b29 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] [instance: 437b21d5-932d-4216-b7f7-17c6eab2665f] Successfully reverted task state from None on failure for instance. [ 1491.610605] env[67015]: ERROR oslo_messaging.rpc.server [None req-ff6acf81-42ec-4ead-84ba-7d9bff826b29 tempest-DeleteServersAdminTestJSON-229028277 tempest-DeleteServersAdminTestJSON-229028277-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
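
All three tracebacks in this section bottom out in neutronclient's list_ports. The same call can be made directly when troubleshooting; `sess` is assumed to be a keystoneauth1 session like the sketch further above, and the device_id filter mirrors how deallocate_for_instance looks up the instance's ports:

from neutronclient.v2_0 import client as neutron_client

neutron = neutron_client.Client(session=sess)
# With broken service credentials this raises
# neutronclient.common.exceptions.Unauthorized, exactly as in the traceback.
ports = neutron.list_ports(device_id='437b21d5-932d-4216-b7f7-17c6eab2665f')
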
[ 1491.610605] env[67015]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1491.610605] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1491.610605] env[67015]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1491.610605] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1491.610605] env[67015]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1491.610605] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1491.610605] env[67015]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1491.610605] env[67015]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1491.610605] env[67015]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-47acf149-64dc-4194-b364-e819f86e1022'] [ 1491.610605] env[67015]: ERROR oslo_messaging.rpc.server [ 1491.610605] env[67015]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1491.610605] env[67015]: ERROR oslo_messaging.rpc.server [ 1491.610605] env[67015]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1491.610605] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1491.611031] env[67015]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1491.611031] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1491.611031] env[67015]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1491.611031] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1491.611031] env[67015]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1491.611031] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1491.611031] env[67015]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1491.611031] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1491.611031] env[67015]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1491.611031] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1491.611031] env[67015]: ERROR oslo_messaging.rpc.server raise self.value [ 1491.611031] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1491.611031] env[67015]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1491.611031] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1491.611031] env[67015]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1491.611031] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1491.611031] env[67015]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1491.611031] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1491.611456] env[67015]: ERROR oslo_messaging.rpc.server raise self.value [ 1491.611456] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1491.611456] env[67015]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1491.611456] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1491.611456] env[67015]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1491.611456] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1491.611456] env[67015]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1491.611456] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1491.611456] env[67015]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1491.611456] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1491.611456] env[67015]: ERROR oslo_messaging.rpc.server raise self.value [ 1491.611456] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1491.611456] env[67015]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1491.611456] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3353, in terminate_instance [ 1491.611456] env[67015]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1491.611456] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1491.611456] env[67015]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1491.611456] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3348, in do_terminate_instance [ 1491.611868] env[67015]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1491.611868] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1491.611868] env[67015]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1491.611868] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1491.611868] env[67015]: ERROR oslo_messaging.rpc.server raise self.value [ 1491.611868] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3341, in do_terminate_instance [ 1491.611868] env[67015]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1491.611868] env[67015]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1491.611868] env[67015]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1491.611868] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1491.611868] env[67015]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1491.611868] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1491.611868] env[67015]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1491.611868] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1491.611868] env[67015]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1491.611868] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1491.611868] env[67015]: ERROR oslo_messaging.rpc.server raise self.value [ 1491.611868] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1491.612292] env[67015]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1491.612292] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1491.612292] env[67015]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1491.612292] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1491.612292] env[67015]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1491.612292] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1491.612292] env[67015]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1491.612292] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1491.612292] env[67015]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1491.612292] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1491.612292] env[67015]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1491.612292] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1491.612292] env[67015]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1491.612292] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1491.612292] env[67015]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1491.612292] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1491.612292] env[67015]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1491.612292] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1491.612707] env[67015]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1491.612707] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1491.612707] env[67015]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1491.612707] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1491.612707] env[67015]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1491.612707] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1491.612707] env[67015]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1491.612707] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1491.612707] env[67015]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1491.612707] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1491.612707] env[67015]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1491.612707] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1491.612707] env[67015]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1491.612707] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1491.612707] env[67015]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1491.612707] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1491.612707] env[67015]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1491.612707] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1491.613119] env[67015]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1491.613119] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1491.613119] env[67015]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1491.613119] env[67015]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1491.613119] env[67015]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1491.613119] env[67015]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
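
The conversion itself happens at nova/network/neutron.py:196/212 in every traceback: a proxy wrapper around each client call turns a keystone Unauthorized into NeutronAdminCredentialConfigurationInvalid. A simplified sketch of that shape (not Nova's actual code; the nova import assumes a Nova checkout on the path):

from neutronclient.common import exceptions as neutron_exc
from nova import exception

def wrap(call):
    def wrapper(*args, **kwargs):
        try:
            return call(*args, **kwargs)  # the `ret = obj(*args, **kwargs)` frame
        except neutron_exc.Unauthorized:
            # The admin/service token was rejected: treated as a
            # configuration problem, not a transient fault.
            raise exception.NeutronAdminCredentialConfigurationInvalid()
    return wrapper
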
[ 1491.613119] env[67015]: ERROR oslo_messaging.rpc.server [ 1491.629765] env[67015]: DEBUG nova.policy [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '875c06abda0f4390a12826ea01442df3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4406b2fac7884b1a8e60f467118b923b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 1491.653975] env[67015]: DEBUG nova.compute.manager [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Start spawning the instance on the hypervisor. {{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1491.682671] env[67015]: DEBUG nova.virt.hardware [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=<?>,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-20T08:16:53Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1491.682903] env[67015]: DEBUG nova.virt.hardware [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1491.683087] env[67015]: DEBUG nova.virt.hardware [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1491.683273] env[67015]: DEBUG nova.virt.hardware [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1491.684667] env[67015]: DEBUG nova.virt.hardware [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1491.684667] env[67015]: DEBUG nova.virt.hardware 
[None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1491.684667] env[67015]: DEBUG nova.virt.hardware [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1491.684667] env[67015]: DEBUG nova.virt.hardware [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1491.684667] env[67015]: DEBUG nova.virt.hardware [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1491.684928] env[67015]: DEBUG nova.virt.hardware [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1491.684928] env[67015]: DEBUG nova.virt.hardware [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1491.685303] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7436a67-68fb-42ca-bb11-4d275633d232 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.694599] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e718c4-33d1-4b5e-8506-2b86c31e53ec {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.996179] env[67015]: DEBUG nova.network.neutron [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Successfully created port: 8d8839ca-40a2-4a14-a5f2-49069fdcbf34 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1492.502057] env[67015]: DEBUG nova.compute.manager [req-3adfe2c8-e1c8-470b-aa0b-a7f4f86702aa req-922c6f65-8280-43db-a5a3-b6775e3f192d service nova] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Received event network-vif-plugged-8d8839ca-40a2-4a14-a5f2-49069fdcbf34 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1492.505204] env[67015]: DEBUG oslo_concurrency.lockutils [req-3adfe2c8-e1c8-470b-aa0b-a7f4f86702aa req-922c6f65-8280-43db-a5a3-b6775e3f192d service 
nova] Acquiring lock "8f61fa9e-82b6-401a-a7e0-fe4a54561f8d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.505579] env[67015]: DEBUG oslo_concurrency.lockutils [req-3adfe2c8-e1c8-470b-aa0b-a7f4f86702aa req-922c6f65-8280-43db-a5a3-b6775e3f192d service nova] Lock "8f61fa9e-82b6-401a-a7e0-fe4a54561f8d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.003s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.505844] env[67015]: DEBUG oslo_concurrency.lockutils [req-3adfe2c8-e1c8-470b-aa0b-a7f4f86702aa req-922c6f65-8280-43db-a5a3-b6775e3f192d service nova] Lock "8f61fa9e-82b6-401a-a7e0-fe4a54561f8d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.506251] env[67015]: DEBUG nova.compute.manager [req-3adfe2c8-e1c8-470b-aa0b-a7f4f86702aa req-922c6f65-8280-43db-a5a3-b6775e3f192d service nova] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] No waiting events found dispatching network-vif-plugged-8d8839ca-40a2-4a14-a5f2-49069fdcbf34 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1492.507025] env[67015]: WARNING nova.compute.manager [req-3adfe2c8-e1c8-470b-aa0b-a7f4f86702aa req-922c6f65-8280-43db-a5a3-b6775e3f192d service nova] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Received unexpected event network-vif-plugged-8d8839ca-40a2-4a14-a5f2-49069fdcbf34 for instance with vm_state building and task_state spawning. 
[ 1492.510111] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1492.664271] env[67015]: DEBUG nova.compute.manager [req-30626ce2-f408-420f-b90e-2ef7cb0fbdf8 req-1ef9ebd0-392a-4ae9-b7a1-75de90a53b17 service nova] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Received event network-vif-plugged-1260e714-732e-4fd3-b31a-c5c923e65674 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1492.664540] env[67015]: DEBUG oslo_concurrency.lockutils [req-30626ce2-f408-420f-b90e-2ef7cb0fbdf8 req-1ef9ebd0-392a-4ae9-b7a1-75de90a53b17 service nova] Acquiring lock "41f47735-f679-4b30-8e30-f917dcf4db42-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.664700] env[67015]: DEBUG oslo_concurrency.lockutils [req-30626ce2-f408-420f-b90e-2ef7cb0fbdf8 req-1ef9ebd0-392a-4ae9-b7a1-75de90a53b17 service nova] Lock "41f47735-f679-4b30-8e30-f917dcf4db42-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.664866] env[67015]: DEBUG oslo_concurrency.lockutils [req-30626ce2-f408-420f-b90e-2ef7cb0fbdf8 req-1ef9ebd0-392a-4ae9-b7a1-75de90a53b17 service nova] Lock "41f47735-f679-4b30-8e30-f917dcf4db42-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.665049] env[67015]: DEBUG nova.compute.manager [req-30626ce2-f408-420f-b90e-2ef7cb0fbdf8 req-1ef9ebd0-392a-4ae9-b7a1-75de90a53b17 service nova] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] No waiting events found dispatching network-vif-plugged-1260e714-732e-4fd3-b31a-c5c923e65674 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1492.665226] env[67015]: WARNING nova.compute.manager [req-30626ce2-f408-420f-b90e-2ef7cb0fbdf8 req-1ef9ebd0-392a-4ae9-b7a1-75de90a53b17 service nova] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Received unexpected event network-vif-plugged-1260e714-732e-4fd3-b31a-c5c923e65674 for instance with vm_state building and task_state spawning. 
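
The "No waiting events found" / "Received unexpected event" warnings above are benign in this case: Neutron's network-vif-plugged notification arrived before the spawn path registered a waiter for it. A toy model of that dispatch using eventlet events (entirely illustrative, not Nova's implementation):

import eventlet

waiters = {}  # event name -> eventlet.event.Event

def prepare_for_event(name):
    waiters[name] = eventlet.event.Event()

def pop_instance_event(name):
    evt = waiters.pop(name, None)
    if evt is None:
        # Nobody registered yet: the "No waiting events found" case above.
        print('Received unexpected event %s' % name)
        return
    evt.send(name)  # wakes the greenthread blocked on evt.wait()
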
[ 1492.741704] env[67015]: DEBUG nova.network.neutron [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Successfully updated port: 8d8839ca-40a2-4a14-a5f2-49069fdcbf34 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1492.752932] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Acquiring lock "refresh_cache-8f61fa9e-82b6-401a-a7e0-fe4a54561f8d" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1492.753315] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Acquired lock "refresh_cache-8f61fa9e-82b6-401a-a7e0-fe4a54561f8d" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.753944] env[67015]: DEBUG nova.network.neutron [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1492.787831] env[67015]: DEBUG nova.network.neutron [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Successfully updated port: 1260e714-732e-4fd3-b31a-c5c923e65674 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1492.796956] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Acquiring lock "refresh_cache-41f47735-f679-4b30-8e30-f917dcf4db42" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1492.797443] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Acquired lock "refresh_cache-41f47735-f679-4b30-8e30-f917dcf4db42" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.797690] env[67015]: DEBUG nova.network.neutron [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1492.803015] env[67015]: DEBUG nova.network.neutron [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Instance cache missing network info. 
{{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1492.847474] env[67015]: DEBUG nova.network.neutron [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1492.980444] env[67015]: DEBUG nova.network.neutron [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Updating instance_info_cache with network_info: [{"id": "8d8839ca-40a2-4a14-a5f2-49069fdcbf34", "address": "fa:16:3e:83:13:02", "network": {"id": "e7046521-e56d-4c37-b3b9-f910de4929c0", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1368515439-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4406b2fac7884b1a8e60f467118b923b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d8839ca-40", "ovs_interfaceid": "8d8839ca-40a2-4a14-a5f2-49069fdcbf34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1492.991520] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Releasing lock "refresh_cache-8f61fa9e-82b6-401a-a7e0-fe4a54561f8d" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.991983] env[67015]: DEBUG nova.compute.manager [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Instance network_info: |[{"id": "8d8839ca-40a2-4a14-a5f2-49069fdcbf34", "address": "fa:16:3e:83:13:02", "network": {"id": "e7046521-e56d-4c37-b3b9-f910de4929c0", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1368515439-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4406b2fac7884b1a8e60f467118b923b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", 
"external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d8839ca-40", "ovs_interfaceid": "8d8839ca-40a2-4a14-a5f2-49069fdcbf34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1492.995015] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:13:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ecc4615-18f0-4324-8e16-5e5d513325e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8d8839ca-40a2-4a14-a5f2-49069fdcbf34', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1493.001432] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Creating folder: Project (4406b2fac7884b1a8e60f467118b923b). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1493.009016] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f11751d-b242-4a2a-9701-93663707f28b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.022898] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Created folder: Project (4406b2fac7884b1a8e60f467118b923b) in parent group-v623108. [ 1493.022898] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Creating folder: Instances. Parent ref: group-v623198. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1493.022898] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ad794510-bb5a-4b78-a475-0128c27f0ec4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.028875] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Created folder: Instances in parent group-v623198. [ 1493.029131] env[67015]: DEBUG oslo.service.loopingcall [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1493.029316] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1493.029500] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eef4b9cf-6f2b-4b28-bf25-bf6bf925e072 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.048700] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1493.048700] env[67015]: value = "task-3114520" [ 1493.048700] env[67015]: _type = "Task" [ 1493.048700] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.058009] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114520, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.099549] env[67015]: DEBUG nova.network.neutron [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Updating instance_info_cache with network_info: [{"id": "1260e714-732e-4fd3-b31a-c5c923e65674", "address": "fa:16:3e:2f:cc:4e", "network": {"id": "b648056b-360a-4859-9312-431461c9c126", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-984038004-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6ae460357cb49bebc05c4d993d2fddf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1260e714-73", "ovs_interfaceid": "1260e714-732e-4fd3-b31a-c5c923e65674", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.117733] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Releasing lock "refresh_cache-41f47735-f679-4b30-8e30-f917dcf4db42" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1493.118064] env[67015]: DEBUG nova.compute.manager [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Instance network_info: |[{"id": "1260e714-732e-4fd3-b31a-c5c923e65674", "address": "fa:16:3e:2f:cc:4e", "network": {"id": "b648056b-360a-4859-9312-431461c9c126", "bridge": 
"br-int", "label": "tempest-ServerMetadataNegativeTestJSON-984038004-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6ae460357cb49bebc05c4d993d2fddf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1260e714-73", "ovs_interfaceid": "1260e714-732e-4fd3-b31a-c5c923e65674", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1493.118501] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:cc:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '685b4083-b748-41fb-a68a-273b1073fa28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1260e714-732e-4fd3-b31a-c5c923e65674', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1493.127868] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Creating folder: Project (a6ae460357cb49bebc05c4d993d2fddf). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1493.128558] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a2ed11a8-d876-49cf-8293-d1e69c60ec0f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.138742] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Created folder: Project (a6ae460357cb49bebc05c4d993d2fddf) in parent group-v623108. [ 1493.138973] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Creating folder: Instances. Parent ref: group-v623201. 
{{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1493.139197] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3b82dde0-c7ad-479c-9b66-591a902e56f4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.147955] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Created folder: Instances in parent group-v623201. [ 1493.148226] env[67015]: DEBUG oslo.service.loopingcall [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1493.148415] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1493.148625] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-897a7813-233f-451a-a289-60d1a36d0078 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.166878] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1493.166878] env[67015]: value = "task-3114523" [ 1493.166878] env[67015]: _type = "Task" [ 1493.166878] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.174116] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114523, 'name': CreateVM_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.513873] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1493.525954] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1493.526200] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.526376] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.526573] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1493.527663] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbba95d1-7c57-4dde-8a89-8c27d7e5414a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.535794] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff93a22-cb12-4472-9d48-20bd292df6b1 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.549282] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe18f94b-ef1d-45fd-9add-e7e40a2b1346 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.560538] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a68fc9a-2ae1-4751-a773-932fef29741e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.563244] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114520, 'name': CreateVM_Task, 'duration_secs': 0.339907} completed successfully. 
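
The periodic update_available_resource audit that interleaves with the builds serializes on an in-process lock named "compute_resources"; the paired Acquiring/acquired/released records, with their waited and held timings, are emitted by oslo.concurrency's lock wrapper. A minimal sketch of the same serialization, assuming oslo.concurrency is installed; the function body is a placeholder:

from oslo_concurrency import lockutils

# Every audit and claim goes through this one named lock, which is why
# the log shows "waited 0.000s" when uncontended and a non-zero "held"
# time covering the whole audit.
@lockutils.synchronized('compute_resources')
def update_available_resource(nodename):
    # ... audit hypervisor resources and reconcile placement here ...
    return nodename
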
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.563403] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1493.564437] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1493.564603] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.564908] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1493.565388] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-448ebe53-7a14-41cc-9a82-9d08317d71f2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.592368] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181057MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1493.592525] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1493.592727] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.597144] env[67015]: DEBUG oslo_vmware.api [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Waiting for the task: (returnval){ [ 1493.597144] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52faa805-7a83-6163-f330-5df7c70b9909" [ 1493.597144] env[67015]: _type = "Task" [ 1493.597144] env[67015]: } to complete. 
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.604918] env[67015]: DEBUG oslo_vmware.api [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52faa805-7a83-6163-f330-5df7c70b9909, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.672235] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1493.672483] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 7b744243-c7e5-4253-9273-9d7f84772d96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1493.672519] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 98f18180-bd1c-492d-9fbe-4bf306aca4b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1493.672639] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 7a5c2a57-b28d-45e0-ab7b-5a649758b69b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1493.672760] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance c7d8b42d-6455-4489-9f62-8ab9f85e7f76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1493.672878] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1493.672994] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6d104e2f-9924-4094-823d-a78c21acfc7b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1493.673123] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 944728f0-7db6-4cca-a51c-7acb5998cb12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1493.673239] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 41f47735-f679-4b30-8e30-f917dcf4db42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1493.673351] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1493.679213] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114523, 'name': CreateVM_Task, 'duration_secs': 0.299817} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.679357] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1493.679967] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1493.684019] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 199b0508-5b88-41b4-ae08-dcdabb656686 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1493.694265] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance f9cb0b88-053a-4a6d-be59-5aa202b6a4f7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1493.703798] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 03d60eff-5f68-4c04-83e4-926c0ac3103a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1493.713561] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 210c7815-0a29-47a6-a4c0-5e7f9ca5abf2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1493.713793] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1493.713951] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1493.876854] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9db2579-d1aa-4736-a2dd-ffaf5943cef8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.884539] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d907a35-77c1-44da-963b-12c2421f0e8a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.914668] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe1839d-572e-4fe7-9ef3-3f0c05001d54 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.921404] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b68df08a-60f1-428c-a179-f04a0d161134 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.933829] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1493.942626] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1493.955673] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] 
Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1493.955872] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.363s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.107598] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1494.107859] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1494.108092] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.108349] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.108654] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1494.108896] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed1048b9-0d84-4ec3-9010-7e888d4ecea0 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.113838] env[67015]: DEBUG oslo_vmware.api [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Waiting for the task: (returnval){ [ 1494.113838] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]526379dd-51ec-e190-0aa3-5bbbd7be00af" [ 1494.113838] env[67015]: _type = "Task" [ 1494.113838] env[67015]: } to complete. 
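
The Final resource view a few records up is plain arithmetic over the tracked allocations: ten actively managed instances, each holding DISK_GB=1, MEMORY_MB=128, VCPU=1, plus the 512 MB of reserved memory from the inventory record. A quick check:

# Reconstructing used_ram=1792MB, used_disk=10GB, used_vcpus=10 from the
# per-instance placement allocations reported by the resource tracker.
allocations = [{'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}] * 10
reserved_mb = 512  # MEMORY_MB 'reserved' in the inventory data above

used_ram = reserved_mb + sum(a['MEMORY_MB'] for a in allocations)
used_disk = sum(a['DISK_GB'] for a in allocations)
used_vcpus = sum(a['VCPU'] for a in allocations)
assert (used_ram, used_disk, used_vcpus) == (1792, 10, 10)
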
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.121439] env[67015]: DEBUG oslo_vmware.api [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]526379dd-51ec-e190-0aa3-5bbbd7be00af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.564798] env[67015]: DEBUG nova.compute.manager [req-250f5b85-ff5f-4694-9e41-8095fae262a3 req-bd183772-d5af-418c-8dbe-dcf246d073e3 service nova] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Received event network-changed-8d8839ca-40a2-4a14-a5f2-49069fdcbf34 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1494.565009] env[67015]: DEBUG nova.compute.manager [req-250f5b85-ff5f-4694-9e41-8095fae262a3 req-bd183772-d5af-418c-8dbe-dcf246d073e3 service nova] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Refreshing instance network info cache due to event network-changed-8d8839ca-40a2-4a14-a5f2-49069fdcbf34. {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1494.565232] env[67015]: DEBUG oslo_concurrency.lockutils [req-250f5b85-ff5f-4694-9e41-8095fae262a3 req-bd183772-d5af-418c-8dbe-dcf246d073e3 service nova] Acquiring lock "refresh_cache-8f61fa9e-82b6-401a-a7e0-fe4a54561f8d" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.565374] env[67015]: DEBUG oslo_concurrency.lockutils [req-250f5b85-ff5f-4694-9e41-8095fae262a3 req-bd183772-d5af-418c-8dbe-dcf246d073e3 service nova] Acquired lock "refresh_cache-8f61fa9e-82b6-401a-a7e0-fe4a54561f8d" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.565547] env[67015]: DEBUG nova.network.neutron [req-250f5b85-ff5f-4694-9e41-8095fae262a3 req-bd183772-d5af-418c-8dbe-dcf246d073e3 service nova] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Refreshing network info cache for port 8d8839ca-40a2-4a14-a5f2-49069fdcbf34 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1494.624432] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1494.624695] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1494.624908] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.778603] env[67015]: DEBUG nova.compute.manager [req-18e2e1ee-f9a2-4385-95cf-388d36e45bab req-1ba9e970-f6df-4113-8f5e-ca173f467c5f service nova] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Received event network-changed-1260e714-732e-4fd3-b31a-c5c923e65674 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1494.778960] env[67015]: DEBUG nova.compute.manager [req-18e2e1ee-f9a2-4385-95cf-388d36e45bab req-1ba9e970-f6df-4113-8f5e-ca173f467c5f service nova] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Refreshing instance network info cache due to event network-changed-1260e714-732e-4fd3-b31a-c5c923e65674. {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1494.779266] env[67015]: DEBUG oslo_concurrency.lockutils [req-18e2e1ee-f9a2-4385-95cf-388d36e45bab req-1ba9e970-f6df-4113-8f5e-ca173f467c5f service nova] Acquiring lock "refresh_cache-41f47735-f679-4b30-8e30-f917dcf4db42" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.779449] env[67015]: DEBUG oslo_concurrency.lockutils [req-18e2e1ee-f9a2-4385-95cf-388d36e45bab req-1ba9e970-f6df-4113-8f5e-ca173f467c5f service nova] Acquired lock "refresh_cache-41f47735-f679-4b30-8e30-f917dcf4db42" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.779590] env[67015]: DEBUG nova.network.neutron [req-18e2e1ee-f9a2-4385-95cf-388d36e45bab req-1ba9e970-f6df-4113-8f5e-ca173f467c5f service nova] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Refreshing network info cache for port 1260e714-732e-4fd3-b31a-c5c923e65674 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1494.955977] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1494.958987] env[67015]: DEBUG nova.network.neutron [req-250f5b85-ff5f-4694-9e41-8095fae262a3 req-bd183772-d5af-418c-8dbe-dcf246d073e3 service nova] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Updated VIF entry in instance network info cache for port 8d8839ca-40a2-4a14-a5f2-49069fdcbf34. 
{{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1494.959336] env[67015]: DEBUG nova.network.neutron [req-250f5b85-ff5f-4694-9e41-8095fae262a3 req-bd183772-d5af-418c-8dbe-dcf246d073e3 service nova] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Updating instance_info_cache with network_info: [{"id": "8d8839ca-40a2-4a14-a5f2-49069fdcbf34", "address": "fa:16:3e:83:13:02", "network": {"id": "e7046521-e56d-4c37-b3b9-f910de4929c0", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1368515439-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4406b2fac7884b1a8e60f467118b923b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d8839ca-40", "ovs_interfaceid": "8d8839ca-40a2-4a14-a5f2-49069fdcbf34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1494.969616] env[67015]: DEBUG oslo_concurrency.lockutils [req-250f5b85-ff5f-4694-9e41-8095fae262a3 req-bd183772-d5af-418c-8dbe-dcf246d073e3 service nova] Releasing lock "refresh_cache-8f61fa9e-82b6-401a-a7e0-fe4a54561f8d" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.036695] env[67015]: DEBUG nova.network.neutron [req-18e2e1ee-f9a2-4385-95cf-388d36e45bab req-1ba9e970-f6df-4113-8f5e-ca173f467c5f service nova] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Updated VIF entry in instance network info cache for port 1260e714-732e-4fd3-b31a-c5c923e65674. 
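
These Updated VIF entry records carry the same Neutron port JSON that the driver earlier condensed into its Instance VIF info: the bridge becomes network_name, the port id becomes iface_id, and the nsx-logical-switch-id becomes an OpaqueNetwork reference. A simplified sketch of that field selection, inferred from the records in this log rather than taken from the driver source:

def vif_info_from_network_info(vif, vif_model='vmxnet3'):
    # vif is one entry of the network_info list cached above.
    return {
        'network_name': vif['network']['bridge'],  # 'br-int'
        'mac_address': vif['address'],
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': vif['details']['nsx-logical-switch-id'],
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': vif['id'],
        'vif_model': vif_model,
    }
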
{{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1495.037092] env[67015]: DEBUG nova.network.neutron [req-18e2e1ee-f9a2-4385-95cf-388d36e45bab req-1ba9e970-f6df-4113-8f5e-ca173f467c5f service nova] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Updating instance_info_cache with network_info: [{"id": "1260e714-732e-4fd3-b31a-c5c923e65674", "address": "fa:16:3e:2f:cc:4e", "network": {"id": "b648056b-360a-4859-9312-431461c9c126", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-984038004-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6ae460357cb49bebc05c4d993d2fddf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1260e714-73", "ovs_interfaceid": "1260e714-732e-4fd3-b31a-c5c923e65674", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1495.046626] env[67015]: DEBUG oslo_concurrency.lockutils [req-18e2e1ee-f9a2-4385-95cf-388d36e45bab req-1ba9e970-f6df-4113-8f5e-ca173f467c5f service nova] Releasing lock "refresh_cache-41f47735-f679-4b30-8e30-f917dcf4db42" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.514611] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1495.514761] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1499.514897] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1499.515275] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1499.515275] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1499.538071] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Skipping network cache update for instance because it is Building. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1499.538222] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1499.538415] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1499.538607] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1499.538787] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1499.538948] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1499.539173] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1499.539341] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1499.539476] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1499.539600] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1499.539750] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. 
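
The heal task rebuilds its worklist every period and drops any instance still in the Building state, which is why all ten instances are skipped here and the run ends with nothing to heal. A condensed sketch of that filter; the dict shape is illustrative, not Nova's instance object:

BUILDING = 'building'

def instances_to_heal(instances):
    # Yield only instances whose network info cache is worth refreshing.
    for inst in instances:
        if inst['vm_state'] == BUILDING:
            print(f"[instance: {inst['uuid']}] Skipping network cache "
                  "update for instance because it is Building.")
            continue
        yield inst
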
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1502.900938] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b2f411ac-3827-45fa-8f3f-89d98f885dda tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Acquiring lock "41f47735-f679-4b30-8e30-f917dcf4db42" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1536.132704] env[67015]: WARNING oslo_vmware.rw_handles [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1536.132704] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1536.132704] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1536.132704] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1536.132704] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1536.132704] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 1536.132704] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1536.132704] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1536.132704] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1536.132704] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1536.132704] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1536.132704] env[67015]: ERROR oslo_vmware.rw_handles [ 1536.133396] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/81ef7a88-23a5-40d5-bb44-795beece2244/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1536.136299] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1536.136698] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Copying Virtual Disk [datastore2] vmware_temp/81ef7a88-23a5-40d5-bb44-795beece2244/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/81ef7a88-23a5-40d5-bb44-795beece2244/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1536.137157] 
env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e2ee0859-c623-43bf-919f-86974b083b69 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.146123] env[67015]: DEBUG oslo_vmware.api [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Waiting for the task: (returnval){ [ 1536.146123] env[67015]: value = "task-3114524" [ 1536.146123] env[67015]: _type = "Task" [ 1536.146123] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.158807] env[67015]: DEBUG oslo_vmware.api [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Task: {'id': task-3114524, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.657905] env[67015]: DEBUG oslo_vmware.exceptions [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Fault InvalidArgument not matched. {{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1536.658169] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1536.658742] env[67015]: ERROR nova.compute.manager [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1536.658742] env[67015]: Faults: ['InvalidArgument'] [ 1536.658742] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Traceback (most recent call last): [ 1536.658742] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1536.658742] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] yield resources [ 1536.658742] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1536.658742] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] self.driver.spawn(context, instance, image_meta, [ 1536.658742] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1536.658742] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1536.658742] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] 
File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1536.658742] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] self._fetch_image_if_missing(context, vi) [ 1536.658742] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1536.659165] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] image_cache(vi, tmp_image_ds_loc) [ 1536.659165] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1536.659165] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] vm_util.copy_virtual_disk( [ 1536.659165] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1536.659165] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] session._wait_for_task(vmdk_copy_task) [ 1536.659165] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1536.659165] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] return self.wait_for_task(task_ref) [ 1536.659165] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1536.659165] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] return evt.wait() [ 1536.659165] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1536.659165] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] result = hub.switch() [ 1536.659165] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1536.659165] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] return self.greenlet.switch() [ 1536.659613] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1536.659613] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] self.f(*self.args, **self.kw) [ 1536.659613] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1536.659613] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] raise exceptions.translate_fault(task_info.error) [ 1536.659613] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1536.659613] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Faults: ['InvalidArgument'] [ 1536.659613] 
env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] [ 1536.659613] env[67015]: INFO nova.compute.manager [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Terminating instance [ 1536.660839] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1536.661064] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1536.661312] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8c0914c2-99f7-42c4-bcef-dd8dd6a81938 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.663879] env[67015]: DEBUG nova.compute.manager [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1536.664097] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1536.664836] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76400365-b4af-4c2b-9f55-be86b501c754 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.671917] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1536.672174] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f7080a39-0f30-42a4-8b3a-16599e549c9b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.674720] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1536.674892] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 
tempest-AttachInterfacesTestJSON-1590599813-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1536.675613] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb00017f-32dd-4917-a1ef-49f2afa88844 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.680423] env[67015]: DEBUG oslo_vmware.api [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Waiting for the task: (returnval){ [ 1536.680423] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52d7acb4-ceee-a2da-cf3c-f9a23ccb67bd" [ 1536.680423] env[67015]: _type = "Task" [ 1536.680423] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.687640] env[67015]: DEBUG oslo_vmware.api [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52d7acb4-ceee-a2da-cf3c-f9a23ccb67bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.751421] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1536.751727] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1536.751882] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Deleting the datastore file [datastore2] 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9 {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1536.752063] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-14c1b5a6-e95b-4af2-8409-115dfca67767 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.759080] env[67015]: DEBUG oslo_vmware.api [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Waiting for the task: (returnval){ [ 1536.759080] env[67015]: value = "task-3114526" [ 1536.759080] env[67015]: _type = "Task" [ 1536.759080] env[67015]: } to complete. 
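
The failure above begins when oslo.vmware tries to map the CopyVirtualDisk fault name to a specific exception class, finds none for InvalidArgument (the "Fault InvalidArgument not matched" record), and falls back to a generic VimFaultException carrying the fault list. A small sketch of catching and inspecting that exception, assuming oslo.vmware is installed; treat the attribute names as an assumption read off the log rather than a verified API:

from oslo_vmware import exceptions as vexc

try:
    # Simulate the translated CopyVirtualDisk_Task failure from the log.
    raise vexc.VimFaultException(
        ['InvalidArgument'],
        'A specified parameter was not correct: fileType')
except vexc.VimFaultException as e:
    # fault_list is assumed to hold the VIM fault names, per the
    # "Faults: ['InvalidArgument']" lines above.
    assert 'InvalidArgument' in e.fault_list
    print(f"spawn failed: {e}; faults: {e.fault_list}")
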
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.767794] env[67015]: DEBUG oslo_vmware.api [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Task: {'id': task-3114526, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.191113] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1537.191402] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Creating directory with path [datastore2] vmware_temp/67f95494-5a20-44fa-b0d9-e0bdbbcdbd5b/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1537.191624] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0dfa4f91-2916-483b-a2c4-17b11a2143de {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.205508] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Created directory with path [datastore2] vmware_temp/67f95494-5a20-44fa-b0d9-e0bdbbcdbd5b/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1537.205736] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Fetch image to [datastore2] vmware_temp/67f95494-5a20-44fa-b0d9-e0bdbbcdbd5b/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1537.205969] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/67f95494-5a20-44fa-b0d9-e0bdbbcdbd5b/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1537.206697] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006dcc6b-f575-4836-8598-d9fc11ec826d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.213657] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d4bd5a4-d1d1-4227-9c01-fa7deff50bd3 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.222688] env[67015]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-367abd3a-3e90-46ab-9d5b-fc506caefeb1 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.254130] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-744fe5fc-c2e3-4d42-ba18-3001d63d0e1c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.262819] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5cd6f395-7f94-4af1-b457-521c68bdc30f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.269219] env[67015]: DEBUG oslo_vmware.api [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Task: {'id': task-3114526, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192816} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.269494] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1537.269679] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1537.269851] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1537.270040] env[67015]: INFO nova.compute.manager [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1537.272252] env[67015]: DEBUG nova.compute.claims [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1537.272430] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1537.272652] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1537.286152] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1537.411332] env[67015]: DEBUG oslo_vmware.rw_handles [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/67f95494-5a20-44fa-b0d9-e0bdbbcdbd5b/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1537.472379] env[67015]: DEBUG oslo_vmware.rw_handles [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1537.472561] env[67015]: DEBUG oslo_vmware.rw_handles [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/67f95494-5a20-44fa-b0d9-e0bdbbcdbd5b/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1537.554503] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71bcb98c-ec2d-4ea6-b67a-55990fabaa25 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.562888] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e82237-f196-4aee-bad7-46e3f24fc942 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.592795] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f01bac53-10cd-4b68-bdb3-2e780ee30a28 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.599915] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24eda42c-9631-43c0-aba6-4baa9cd1fa7d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.614181] env[67015]: DEBUG nova.compute.provider_tree [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1537.622804] env[67015]: DEBUG nova.scheduler.client.report [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1537.638656] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.366s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.639270] env[67015]: ERROR nova.compute.manager [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1537.639270] env[67015]: Faults: ['InvalidArgument'] [ 1537.639270] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Traceback (most recent call last): [ 1537.639270] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1537.639270] 
env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] self.driver.spawn(context, instance, image_meta, [ 1537.639270] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1537.639270] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1537.639270] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1537.639270] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] self._fetch_image_if_missing(context, vi) [ 1537.639270] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1537.639270] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] image_cache(vi, tmp_image_ds_loc) [ 1537.639270] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1537.639667] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] vm_util.copy_virtual_disk( [ 1537.639667] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1537.639667] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] session._wait_for_task(vmdk_copy_task) [ 1537.639667] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1537.639667] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] return self.wait_for_task(task_ref) [ 1537.639667] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1537.639667] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] return evt.wait() [ 1537.639667] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1537.639667] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] result = hub.switch() [ 1537.639667] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1537.639667] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] return self.greenlet.switch() [ 1537.639667] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1537.639667] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] self.f(*self.args, **self.kw) [ 1537.639999] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1537.639999] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] raise exceptions.translate_fault(task_info.error) [ 1537.639999] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1537.639999] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Faults: ['InvalidArgument'] [ 1537.639999] env[67015]: ERROR nova.compute.manager [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] [ 1537.639999] env[67015]: DEBUG nova.compute.utils [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1537.641497] env[67015]: DEBUG nova.compute.manager [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Build of instance 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9 was re-scheduled: A specified parameter was not correct: fileType [ 1537.641497] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1537.641870] env[67015]: DEBUG nova.compute.manager [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1537.642069] env[67015]: DEBUG nova.compute.manager [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1537.642249] env[67015]: DEBUG nova.compute.manager [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1537.642408] env[67015]: DEBUG nova.network.neutron [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1537.972707] env[67015]: DEBUG nova.network.neutron [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1537.989094] env[67015]: INFO nova.compute.manager [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Took 0.35 seconds to deallocate network for instance. [ 1538.087476] env[67015]: INFO nova.scheduler.client.report [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Deleted allocations for instance 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9 [ 1538.112803] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7d7f0446-99b0-42f0-81e0-1516cd2701f8 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Lock "132f4b05-aaf0-4e58-9fcf-2490d4aed2e9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 574.872s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1538.114040] env[67015]: DEBUG oslo_concurrency.lockutils [None req-11269a0e-430f-48d3-ae8e-6b530f59efa3 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Lock "132f4b05-aaf0-4e58-9fcf-2490d4aed2e9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 379.068s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1538.114774] env[67015]: DEBUG oslo_concurrency.lockutils [None req-11269a0e-430f-48d3-ae8e-6b530f59efa3 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Acquiring lock "132f4b05-aaf0-4e58-9fcf-2490d4aed2e9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1538.114774] env[67015]: DEBUG oslo_concurrency.lockutils [None req-11269a0e-430f-48d3-ae8e-6b530f59efa3 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Lock "132f4b05-aaf0-4e58-9fcf-2490d4aed2e9-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1538.114979] env[67015]: DEBUG oslo_concurrency.lockutils [None req-11269a0e-430f-48d3-ae8e-6b530f59efa3 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Lock "132f4b05-aaf0-4e58-9fcf-2490d4aed2e9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1538.116982] env[67015]: INFO nova.compute.manager [None req-11269a0e-430f-48d3-ae8e-6b530f59efa3 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Terminating instance [ 1538.118885] env[67015]: DEBUG nova.compute.manager [None req-11269a0e-430f-48d3-ae8e-6b530f59efa3 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1538.119100] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-11269a0e-430f-48d3-ae8e-6b530f59efa3 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1538.119635] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-29697396-03d5-4c7e-840f-46129222ce70 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.128796] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1ce2ef-1087-4f87-8eb4-c6673ef34dd1 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.141491] env[67015]: DEBUG nova.compute.manager [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1538.163654] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-11269a0e-430f-48d3-ae8e-6b530f59efa3 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9 could not be found. 
[ 1538.163820] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-11269a0e-430f-48d3-ae8e-6b530f59efa3 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1538.164027] env[67015]: INFO nova.compute.manager [None req-11269a0e-430f-48d3-ae8e-6b530f59efa3 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1538.164319] env[67015]: DEBUG oslo.service.loopingcall [None req-11269a0e-430f-48d3-ae8e-6b530f59efa3 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1538.164491] env[67015]: DEBUG nova.compute.manager [-] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1538.164589] env[67015]: DEBUG nova.network.neutron [-] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1538.190232] env[67015]: DEBUG nova.network.neutron [-] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1538.200083] env[67015]: INFO nova.compute.manager [-] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] Took 0.03 seconds to deallocate network for instance. 
[ 1538.202122] env[67015]: DEBUG oslo_concurrency.lockutils [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1538.202336] env[67015]: DEBUG oslo_concurrency.lockutils [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1538.204235] env[67015]: INFO nova.compute.claims [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1538.305226] env[67015]: DEBUG oslo_concurrency.lockutils [None req-11269a0e-430f-48d3-ae8e-6b530f59efa3 tempest-AttachInterfacesV270Test-424144197 tempest-AttachInterfacesV270Test-424144197-project-member] Lock "132f4b05-aaf0-4e58-9fcf-2490d4aed2e9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.191s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1538.306623] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "132f4b05-aaf0-4e58-9fcf-2490d4aed2e9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 367.491s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1538.307116] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 132f4b05-aaf0-4e58-9fcf-2490d4aed2e9] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1538.307464] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "132f4b05-aaf0-4e58-9fcf-2490d4aed2e9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1538.437729] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25efba0b-0ff6-4674-bb83-0e7347e4dd82 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.445710] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb271f7-90e4-4317-ac5c-d402e5e9d1ef {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.476060] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99569f84-c5dc-405e-b047-6d619d161414 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.482971] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f848eb-d359-409f-8c5a-5c85fa82d49f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.495477] env[67015]: DEBUG nova.compute.provider_tree [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1538.506113] env[67015]: DEBUG nova.scheduler.client.report [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1538.523367] env[67015]: DEBUG oslo_concurrency.lockutils [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.321s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1538.523821] env[67015]: DEBUG nova.compute.manager [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Start building networks asynchronously for instance. 
{{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1538.554338] env[67015]: DEBUG nova.compute.utils [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1538.555708] env[67015]: DEBUG nova.compute.manager [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1538.555875] env[67015]: DEBUG nova.network.neutron [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1538.564868] env[67015]: DEBUG nova.compute.manager [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1538.609688] env[67015]: DEBUG nova.policy [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c86a33e0be724f9dab93ec0042b4ec82', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '399cd45e86014c9298271d52ae1e21b7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 1538.625183] env[67015]: DEBUG nova.compute.manager [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Start spawning the instance on the hypervisor. 
{{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1538.651637] env[67015]: DEBUG nova.virt.hardware [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1538.651877] env[67015]: DEBUG nova.virt.hardware [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1538.652050] env[67015]: DEBUG nova.virt.hardware [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1538.652238] env[67015]: DEBUG nova.virt.hardware [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1538.652415] env[67015]: DEBUG nova.virt.hardware [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1538.652575] env[67015]: DEBUG nova.virt.hardware [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1538.652788] env[67015]: DEBUG nova.virt.hardware [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1538.652947] env[67015]: DEBUG nova.virt.hardware [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1538.653129] 
env[67015]: DEBUG nova.virt.hardware [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1538.653294] env[67015]: DEBUG nova.virt.hardware [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1538.653469] env[67015]: DEBUG nova.virt.hardware [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1538.654363] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8136a9fd-8ca3-4021-8848-41fd7660f09e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.662498] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4235c6a8-be2c-4da0-8356-2f998e934e9d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.920130] env[67015]: DEBUG nova.network.neutron [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Successfully created port: 7e281a82-21c8-4979-aebc-4262170dd16b {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1539.517523] env[67015]: DEBUG nova.network.neutron [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Successfully updated port: 7e281a82-21c8-4979-aebc-4262170dd16b {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1539.535491] env[67015]: DEBUG oslo_concurrency.lockutils [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Acquiring lock "refresh_cache-199b0508-5b88-41b4-ae08-dcdabb656686" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1539.535631] env[67015]: DEBUG oslo_concurrency.lockutils [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Acquired lock "refresh_cache-199b0508-5b88-41b4-ae08-dcdabb656686" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1539.535779] env[67015]: DEBUG nova.network.neutron [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1539.583265] env[67015]: DEBUG 
nova.network.neutron [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1539.753234] env[67015]: DEBUG nova.network.neutron [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Updating instance_info_cache with network_info: [{"id": "7e281a82-21c8-4979-aebc-4262170dd16b", "address": "fa:16:3e:59:ac:71", "network": {"id": "e5786d5b-528b-4b8e-ad37-5804b9384e8e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1520362497-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "399cd45e86014c9298271d52ae1e21b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb18870e-f482-4c7b-8cd4-5c933d3ad294", "external-id": "nsx-vlan-transportzone-76", "segmentation_id": 76, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e281a82-21", "ovs_interfaceid": "7e281a82-21c8-4979-aebc-4262170dd16b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1539.766671] env[67015]: DEBUG oslo_concurrency.lockutils [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Releasing lock "refresh_cache-199b0508-5b88-41b4-ae08-dcdabb656686" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1539.767120] env[67015]: DEBUG nova.compute.manager [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Instance network_info: |[{"id": "7e281a82-21c8-4979-aebc-4262170dd16b", "address": "fa:16:3e:59:ac:71", "network": {"id": "e5786d5b-528b-4b8e-ad37-5804b9384e8e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1520362497-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "399cd45e86014c9298271d52ae1e21b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb18870e-f482-4c7b-8cd4-5c933d3ad294", "external-id": "nsx-vlan-transportzone-76", "segmentation_id": 76, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e281a82-21", 
"ovs_interfaceid": "7e281a82-21c8-4979-aebc-4262170dd16b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1539.767381] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:ac:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb18870e-f482-4c7b-8cd4-5c933d3ad294', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e281a82-21c8-4979-aebc-4262170dd16b', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1539.774715] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Creating folder: Project (399cd45e86014c9298271d52ae1e21b7). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1539.775424] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-69040b54-1f95-43ee-9609-82fbe28c97c8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.785233] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Created folder: Project (399cd45e86014c9298271d52ae1e21b7) in parent group-v623108. [ 1539.785412] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Creating folder: Instances. Parent ref: group-v623204. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1539.785621] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0cdbdc03-5f81-4a1d-8a14-355bacab61b6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.794632] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Created folder: Instances in parent group-v623204. [ 1539.794847] env[67015]: DEBUG oslo.service.loopingcall [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1539.795030] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1539.795213] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-041666ef-445e-4400-9f80-6da2604101ee {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.813532] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1539.813532] env[67015]: value = "task-3114529" [ 1539.813532] env[67015]: _type = "Task" [ 1539.813532] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.826874] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114529, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.055820] env[67015]: DEBUG nova.compute.manager [req-3428ad5d-c27c-46d2-bcd2-3751b7333907 req-39763c6c-9959-4bcb-b73f-334adc5a2e82 service nova] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Received event network-vif-plugged-7e281a82-21c8-4979-aebc-4262170dd16b {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1540.056054] env[67015]: DEBUG oslo_concurrency.lockutils [req-3428ad5d-c27c-46d2-bcd2-3751b7333907 req-39763c6c-9959-4bcb-b73f-334adc5a2e82 service nova] Acquiring lock "199b0508-5b88-41b4-ae08-dcdabb656686-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1540.056270] env[67015]: DEBUG oslo_concurrency.lockutils [req-3428ad5d-c27c-46d2-bcd2-3751b7333907 req-39763c6c-9959-4bcb-b73f-334adc5a2e82 service nova] Lock "199b0508-5b88-41b4-ae08-dcdabb656686-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1540.056484] env[67015]: DEBUG oslo_concurrency.lockutils [req-3428ad5d-c27c-46d2-bcd2-3751b7333907 req-39763c6c-9959-4bcb-b73f-334adc5a2e82 service nova] Lock "199b0508-5b88-41b4-ae08-dcdabb656686-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.056694] env[67015]: DEBUG nova.compute.manager [req-3428ad5d-c27c-46d2-bcd2-3751b7333907 req-39763c6c-9959-4bcb-b73f-334adc5a2e82 service nova] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] No waiting events found dispatching network-vif-plugged-7e281a82-21c8-4979-aebc-4262170dd16b {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1540.056806] env[67015]: WARNING nova.compute.manager [req-3428ad5d-c27c-46d2-bcd2-3751b7333907 req-39763c6c-9959-4bcb-b73f-334adc5a2e82 service nova] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Received unexpected event network-vif-plugged-7e281a82-21c8-4979-aebc-4262170dd16b for instance with vm_state building and task_state spawning. 
[ 1540.056960] env[67015]: DEBUG nova.compute.manager [req-3428ad5d-c27c-46d2-bcd2-3751b7333907 req-39763c6c-9959-4bcb-b73f-334adc5a2e82 service nova] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Received event network-changed-7e281a82-21c8-4979-aebc-4262170dd16b {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1540.057130] env[67015]: DEBUG nova.compute.manager [req-3428ad5d-c27c-46d2-bcd2-3751b7333907 req-39763c6c-9959-4bcb-b73f-334adc5a2e82 service nova] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Refreshing instance network info cache due to event network-changed-7e281a82-21c8-4979-aebc-4262170dd16b. {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1540.057312] env[67015]: DEBUG oslo_concurrency.lockutils [req-3428ad5d-c27c-46d2-bcd2-3751b7333907 req-39763c6c-9959-4bcb-b73f-334adc5a2e82 service nova] Acquiring lock "refresh_cache-199b0508-5b88-41b4-ae08-dcdabb656686" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1540.057450] env[67015]: DEBUG oslo_concurrency.lockutils [req-3428ad5d-c27c-46d2-bcd2-3751b7333907 req-39763c6c-9959-4bcb-b73f-334adc5a2e82 service nova] Acquired lock "refresh_cache-199b0508-5b88-41b4-ae08-dcdabb656686" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1540.057606] env[67015]: DEBUG nova.network.neutron [req-3428ad5d-c27c-46d2-bcd2-3751b7333907 req-39763c6c-9959-4bcb-b73f-334adc5a2e82 service nova] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Refreshing network info cache for port 7e281a82-21c8-4979-aebc-4262170dd16b {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1540.300319] env[67015]: DEBUG nova.network.neutron [req-3428ad5d-c27c-46d2-bcd2-3751b7333907 req-39763c6c-9959-4bcb-b73f-334adc5a2e82 service nova] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Updated VIF entry in instance network info cache for port 7e281a82-21c8-4979-aebc-4262170dd16b. 
{{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1540.300758] env[67015]: DEBUG nova.network.neutron [req-3428ad5d-c27c-46d2-bcd2-3751b7333907 req-39763c6c-9959-4bcb-b73f-334adc5a2e82 service nova] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Updating instance_info_cache with network_info: [{"id": "7e281a82-21c8-4979-aebc-4262170dd16b", "address": "fa:16:3e:59:ac:71", "network": {"id": "e5786d5b-528b-4b8e-ad37-5804b9384e8e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1520362497-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "399cd45e86014c9298271d52ae1e21b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb18870e-f482-4c7b-8cd4-5c933d3ad294", "external-id": "nsx-vlan-transportzone-76", "segmentation_id": 76, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e281a82-21", "ovs_interfaceid": "7e281a82-21c8-4979-aebc-4262170dd16b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1540.310580] env[67015]: DEBUG oslo_concurrency.lockutils [req-3428ad5d-c27c-46d2-bcd2-3751b7333907 req-39763c6c-9959-4bcb-b73f-334adc5a2e82 service nova] Releasing lock "refresh_cache-199b0508-5b88-41b4-ae08-dcdabb656686" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1540.323408] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114529, 'name': CreateVM_Task, 'duration_secs': 0.290956} completed successfully. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.323555] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1540.324182] env[67015]: DEBUG oslo_concurrency.lockutils [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1540.324342] env[67015]: DEBUG oslo_concurrency.lockutils [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1540.324655] env[67015]: DEBUG oslo_concurrency.lockutils [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1540.324898] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47dd70c2-5472-4593-aa2f-d82a97c12335 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.329216] env[67015]: DEBUG oslo_vmware.api [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Waiting for the task: (returnval){ [ 1540.329216] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]5259007d-fc4a-f325-8854-58c343c2f44c" [ 1540.329216] env[67015]: _type = "Task" [ 1540.329216] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.336655] env[67015]: DEBUG oslo_vmware.api [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]5259007d-fc4a-f325-8854-58c343c2f44c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.839326] env[67015]: DEBUG oslo_concurrency.lockutils [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1540.839635] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1540.839804] env[67015]: DEBUG oslo_concurrency.lockutils [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1549.514548] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1551.509326] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1551.513929] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1551.514131] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1552.513592] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1554.514504] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1554.526848] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.527079] env[67015]: DEBUG oslo_concurrency.lockutils [None 
req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1554.527263] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1554.527442] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1554.528648] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1033f23f-fff3-43c1-991e-544e01c2b2f5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.537513] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88bfa01-c5d1-4421-951d-cbd85d23feb6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.553031] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44cdd804-f36d-4dba-9782-c4a357bf05e5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.559290] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d482b2-e34d-4351-868e-4943b5adaa72 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.590021] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180990MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1554.590021] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.590021] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1554.660671] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 7b744243-c7e5-4253-9273-9d7f84772d96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1554.660836] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 98f18180-bd1c-492d-9fbe-4bf306aca4b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1554.660965] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 7a5c2a57-b28d-45e0-ab7b-5a649758b69b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1554.661098] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance c7d8b42d-6455-4489-9f62-8ab9f85e7f76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1554.661218] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1554.661334] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6d104e2f-9924-4094-823d-a78c21acfc7b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1554.661625] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 944728f0-7db6-4cca-a51c-7acb5998cb12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1554.661625] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 41f47735-f679-4b30-8e30-f917dcf4db42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1554.661728] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1554.661858] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 199b0508-5b88-41b4-ae08-dcdabb656686 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1554.674480] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance f9cb0b88-053a-4a6d-be59-5aa202b6a4f7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1554.685139] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 03d60eff-5f68-4c04-83e4-926c0ac3103a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1554.716720] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 210c7815-0a29-47a6-a4c0-5e7f9ca5abf2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1554.717094] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1554.717293] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1554.860851] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051ea75c-02f4-43d4-915f-626b4fe14c12 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.868130] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd598b0-ff5c-4617-a119-11a547f5aa03 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.898323] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b7b43da-7f4c-4192-8d51-f445548815b8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.905200] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f193477c-38bf-456f-a728-8068a02fce7b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.917689] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1554.926946] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1554.940069] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1554.940245] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.351s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.940056] env[67015]: DEBUG oslo_service.periodic_task [None 
req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1556.514253] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1556.514492] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1556.772627] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquiring lock "f7de465c-7557-41d0-b71a-ad0872c93745" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1556.772870] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "f7de465c-7557-41d0-b71a-ad0872c93745" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1557.338052] env[67015]: DEBUG oslo_concurrency.lockutils [None req-95e089ce-e3a4-43e7-8205-d05c4a562acb tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Acquiring lock "8f61fa9e-82b6-401a-a7e0-fe4a54561f8d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.513971] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1559.514330] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1559.514330] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1559.535540] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1559.535674] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Skipping network cache update for instance because it is Building. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1559.535806] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1559.535930] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1559.536065] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1559.536206] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1559.536326] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1559.536448] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1559.536566] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1559.536683] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1559.536828] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1580.300917] env[67015]: DEBUG oslo_concurrency.lockutils [None req-35bc156e-5989-407c-94b7-dd13280ff8f3 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Acquiring lock "199b0508-5b88-41b4-ae08-dcdabb656686" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1584.499936] env[67015]: WARNING oslo_vmware.rw_handles [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1584.499936] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1584.499936] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1584.499936] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1584.499936] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1584.499936] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 1584.499936] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1584.499936] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1584.499936] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1584.499936] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1584.499936] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1584.499936] env[67015]: ERROR oslo_vmware.rw_handles [ 1584.500624] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/67f95494-5a20-44fa-b0d9-e0bdbbcdbd5b/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1584.502393] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1584.502650] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Copying Virtual Disk [datastore2] vmware_temp/67f95494-5a20-44fa-b0d9-e0bdbbcdbd5b/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/67f95494-5a20-44fa-b0d9-e0bdbbcdbd5b/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1584.502937] 
env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2af95144-de42-4cf5-8a94-a07fca918ee6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.510959] env[67015]: DEBUG oslo_vmware.api [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Waiting for the task: (returnval){ [ 1584.510959] env[67015]: value = "task-3114530" [ 1584.510959] env[67015]: _type = "Task" [ 1584.510959] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.518782] env[67015]: DEBUG oslo_vmware.api [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Task: {'id': task-3114530, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.021479] env[67015]: DEBUG oslo_vmware.exceptions [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Fault InvalidArgument not matched. {{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1585.021737] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1585.022367] env[67015]: ERROR nova.compute.manager [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1585.022367] env[67015]: Faults: ['InvalidArgument'] [ 1585.022367] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Traceback (most recent call last): [ 1585.022367] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1585.022367] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] yield resources [ 1585.022367] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1585.022367] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] self.driver.spawn(context, instance, image_meta, [ 1585.022367] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1585.022367] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1585.022367] env[67015]: ERROR nova.compute.manager [instance: 
98f18180-bd1c-492d-9fbe-4bf306aca4b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1585.022367] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] self._fetch_image_if_missing(context, vi) [ 1585.022367] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1585.022959] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] image_cache(vi, tmp_image_ds_loc) [ 1585.022959] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1585.022959] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] vm_util.copy_virtual_disk( [ 1585.022959] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1585.022959] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] session._wait_for_task(vmdk_copy_task) [ 1585.022959] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1585.022959] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] return self.wait_for_task(task_ref) [ 1585.022959] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1585.022959] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] return evt.wait() [ 1585.022959] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1585.022959] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] result = hub.switch() [ 1585.022959] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1585.022959] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] return self.greenlet.switch() [ 1585.023733] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1585.023733] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] self.f(*self.args, **self.kw) [ 1585.023733] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1585.023733] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] raise exceptions.translate_fault(task_info.error) [ 1585.023733] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1585.023733] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Faults: 
['InvalidArgument'] [ 1585.023733] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] [ 1585.023733] env[67015]: INFO nova.compute.manager [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Terminating instance [ 1585.024389] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1585.024604] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1585.024850] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-88c7cc32-d300-476b-98f0-0e603c5673de {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.027323] env[67015]: DEBUG nova.compute.manager [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1585.027513] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1585.028278] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0206c11a-6611-4382-9386-2ae5c45bccd8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.035188] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1585.035402] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d207f4a8-2c71-41b1-90f9-88744a435d25 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.037595] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1585.037771] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 
tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1585.038706] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30967376-eb35-4248-b955-b23bf0297d88 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.043702] env[67015]: DEBUG oslo_vmware.api [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Waiting for the task: (returnval){ [ 1585.043702] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52125a3a-83de-942f-d0ea-b1cda5c13ab3" [ 1585.043702] env[67015]: _type = "Task" [ 1585.043702] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.050548] env[67015]: DEBUG oslo_vmware.api [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52125a3a-83de-942f-d0ea-b1cda5c13ab3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.107070] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1585.107359] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1585.107482] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Deleting the datastore file [datastore2] 98f18180-bd1c-492d-9fbe-4bf306aca4b2 {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1585.107802] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e2521f9e-3218-4146-b5b6-a66a78239fed {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.114015] env[67015]: DEBUG oslo_vmware.api [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Waiting for the task: (returnval){ [ 1585.114015] env[67015]: value = "task-3114532" [ 1585.114015] env[67015]: _type = "Task" [ 1585.114015] env[67015]: } to complete. 
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.121858] env[67015]: DEBUG oslo_vmware.api [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Task: {'id': task-3114532, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.553768] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1585.554108] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Creating directory with path [datastore2] vmware_temp/ca12125f-46c1-449a-9e3d-8b4cbe772e62/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1585.554226] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7603d6a0-a370-4f6b-b0c5-961cfff0800b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.565197] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Created directory with path [datastore2] vmware_temp/ca12125f-46c1-449a-9e3d-8b4cbe772e62/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1585.565379] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Fetch image to [datastore2] vmware_temp/ca12125f-46c1-449a-9e3d-8b4cbe772e62/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1585.565545] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/ca12125f-46c1-449a-9e3d-8b4cbe772e62/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1585.566250] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-570efab7-c7ef-42a0-ae04-f59094430b2d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.572496] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c830bb2-a956-44fb-a94e-2c37cbaabc0f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.581006] env[67015]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32727d94-80aa-4368-b589-6c8ce9df4d86 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.611059] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3f5b3b-3c0a-4eaf-a0de-2105d0cfddc0 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.618834] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4e4bdbb2-60fa-400d-8c24-9b9ecc48f58b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.623118] env[67015]: DEBUG oslo_vmware.api [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Task: {'id': task-3114532, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.088756} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.623655] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1585.623857] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1585.624066] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1585.624255] env[67015]: INFO nova.compute.manager [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1585.626310] env[67015]: DEBUG nova.compute.claims [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1585.626494] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1585.626721] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.644655] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1585.697949] env[67015]: DEBUG oslo_vmware.rw_handles [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ca12125f-46c1-449a-9e3d-8b4cbe772e62/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1585.759781] env[67015]: DEBUG oslo_vmware.rw_handles [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1585.759979] env[67015]: DEBUG oslo_vmware.rw_handles [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ca12125f-46c1-449a-9e3d-8b4cbe772e62/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1585.890836] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-659b9efa-66a4-43e8-81ec-ba5f9e9fb94c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.898257] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ec5e5dd-18c2-473c-9ca3-6b2b00dad9c7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.926358] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e959ae-65f0-4ad8-b0ef-a3df34b7b5d6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.932796] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13efe9c3-d498-4fe4-ae5f-eef77bbd86ab {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.945995] env[67015]: DEBUG nova.compute.provider_tree [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1585.954139] env[67015]: DEBUG nova.scheduler.client.report [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1585.969894] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.343s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1585.970446] env[67015]: ERROR nova.compute.manager [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1585.970446] env[67015]: Faults: ['InvalidArgument'] [ 1585.970446] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Traceback (most recent call last): [ 1585.970446] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1585.970446] 
env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] self.driver.spawn(context, instance, image_meta, [ 1585.970446] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1585.970446] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1585.970446] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1585.970446] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] self._fetch_image_if_missing(context, vi) [ 1585.970446] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1585.970446] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] image_cache(vi, tmp_image_ds_loc) [ 1585.970446] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1585.970815] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] vm_util.copy_virtual_disk( [ 1585.970815] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1585.970815] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] session._wait_for_task(vmdk_copy_task) [ 1585.970815] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1585.970815] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] return self.wait_for_task(task_ref) [ 1585.970815] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1585.970815] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] return evt.wait() [ 1585.970815] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1585.970815] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] result = hub.switch() [ 1585.970815] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1585.970815] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] return self.greenlet.switch() [ 1585.970815] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1585.970815] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] self.f(*self.args, **self.kw) [ 1585.971137] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1585.971137] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] raise exceptions.translate_fault(task_info.error) [ 1585.971137] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1585.971137] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Faults: ['InvalidArgument'] [ 1585.971137] env[67015]: ERROR nova.compute.manager [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] [ 1585.971348] env[67015]: DEBUG nova.compute.utils [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1585.972538] env[67015]: DEBUG nova.compute.manager [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Build of instance 98f18180-bd1c-492d-9fbe-4bf306aca4b2 was re-scheduled: A specified parameter was not correct: fileType [ 1585.972538] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1585.972914] env[67015]: DEBUG nova.compute.manager [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1585.973106] env[67015]: DEBUG nova.compute.manager [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1585.973279] env[67015]: DEBUG nova.compute.manager [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1585.973440] env[67015]: DEBUG nova.network.neutron [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1586.277115] env[67015]: DEBUG nova.network.neutron [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1586.287839] env[67015]: INFO nova.compute.manager [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Took 0.31 seconds to deallocate network for instance. [ 1586.400117] env[67015]: INFO nova.scheduler.client.report [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Deleted allocations for instance 98f18180-bd1c-492d-9fbe-4bf306aca4b2 [ 1586.422172] env[67015]: DEBUG oslo_concurrency.lockutils [None req-a5364568-6528-4506-a585-5c9f1a9cc23e tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Lock "98f18180-bd1c-492d-9fbe-4bf306aca4b2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 570.985s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1586.423433] env[67015]: DEBUG oslo_concurrency.lockutils [None req-25e0b05f-f17f-4e08-bd07-2d36b4e7446d tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Lock "98f18180-bd1c-492d-9fbe-4bf306aca4b2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 374.695s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1586.423651] env[67015]: DEBUG oslo_concurrency.lockutils [None req-25e0b05f-f17f-4e08-bd07-2d36b4e7446d tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Acquiring lock "98f18180-bd1c-492d-9fbe-4bf306aca4b2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1586.423860] env[67015]: DEBUG oslo_concurrency.lockutils [None req-25e0b05f-f17f-4e08-bd07-2d36b4e7446d tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Lock "98f18180-bd1c-492d-9fbe-4bf306aca4b2-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1586.424046] env[67015]: DEBUG oslo_concurrency.lockutils [None req-25e0b05f-f17f-4e08-bd07-2d36b4e7446d tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Lock "98f18180-bd1c-492d-9fbe-4bf306aca4b2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1586.426385] env[67015]: INFO nova.compute.manager [None req-25e0b05f-f17f-4e08-bd07-2d36b4e7446d tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Terminating instance [ 1586.428121] env[67015]: DEBUG nova.compute.manager [None req-25e0b05f-f17f-4e08-bd07-2d36b4e7446d tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1586.428317] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-25e0b05f-f17f-4e08-bd07-2d36b4e7446d tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1586.428728] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f1890ddf-c27d-4cc3-86ea-4aac60003efe {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.434761] env[67015]: DEBUG nova.compute.manager [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1586.441055] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec921e1c-26bc-47e2-a25a-1697d6c90ae5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.469829] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-25e0b05f-f17f-4e08-bd07-2d36b4e7446d tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 98f18180-bd1c-492d-9fbe-4bf306aca4b2 could not be found. 
[ 1586.470050] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-25e0b05f-f17f-4e08-bd07-2d36b4e7446d tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1586.470233] env[67015]: INFO nova.compute.manager [None req-25e0b05f-f17f-4e08-bd07-2d36b4e7446d tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1586.470470] env[67015]: DEBUG oslo.service.loopingcall [None req-25e0b05f-f17f-4e08-bd07-2d36b4e7446d tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1586.472647] env[67015]: DEBUG nova.compute.manager [-] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1586.472752] env[67015]: DEBUG nova.network.neutron [-] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1586.486388] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1586.486621] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1586.488138] env[67015]: INFO nova.compute.claims [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1586.499487] env[67015]: DEBUG nova.network.neutron [-] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1586.509966] env[67015]: INFO nova.compute.manager [-] [instance: 98f18180-bd1c-492d-9fbe-4bf306aca4b2] Took 0.04 seconds to deallocate network for instance. 
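The recurring "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" triplets (e.g. the compute_resources claim above, held 0.285s) are emitted by oslo.concurrency's synchronized wrapper, which is where the lockutils.py inner frames in these entries come from. A rough sketch of the pattern, with placeholder names rather than Nova's actual code:

    from oslo_concurrency import lockutils

    # Prefixed synchronizer in the style Nova uses; the decorator logs the
    # acquire/wait/held timings seen in this log at DEBUG level.
    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('compute_resources')
    def instance_claim(node, flavor):
        # Placeholder body: runs with "compute_resources" held, so
        # concurrent claims against this host are serialized.
        pass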
[ 1586.595363] env[67015]: DEBUG oslo_concurrency.lockutils [None req-25e0b05f-f17f-4e08-bd07-2d36b4e7446d tempest-AttachInterfacesTestJSON-1590599813 tempest-AttachInterfacesTestJSON-1590599813-project-member] Lock "98f18180-bd1c-492d-9fbe-4bf306aca4b2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.172s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1586.693460] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62eeabac-cbba-410a-8295-b0cf5e1affa7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.700439] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e922415f-eadc-404c-9a6e-ec232bfc9148 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.729387] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59248680-af7a-4a8d-a3b7-041e5c3a3a7b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.736308] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b8cf94b-dea1-4294-bdae-1717fac0ce63 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.749474] env[67015]: DEBUG nova.compute.provider_tree [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1586.758298] env[67015]: DEBUG nova.scheduler.client.report [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1586.772185] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.285s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1586.772660] env[67015]: DEBUG nova.compute.manager [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Start building networks asynchronously for instance. 
{{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1586.806239] env[67015]: DEBUG nova.compute.utils [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1586.807749] env[67015]: DEBUG nova.compute.manager [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1586.807922] env[67015]: DEBUG nova.network.neutron [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1586.820328] env[67015]: DEBUG nova.compute.manager [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1586.869343] env[67015]: DEBUG nova.policy [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c3426d6e6afc481e906bf36697b76700', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6f4adfa6681f4d0e983249b240d3e20c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 1586.882226] env[67015]: DEBUG nova.compute.manager [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Start spawning the instance on the hypervisor. 
{{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1586.906460] env[67015]: DEBUG nova.virt.hardware [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1586.906706] env[67015]: DEBUG nova.virt.hardware [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1586.906893] env[67015]: DEBUG nova.virt.hardware [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1586.907079] env[67015]: DEBUG nova.virt.hardware [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1586.907239] env[67015]: DEBUG nova.virt.hardware [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1586.907457] env[67015]: DEBUG nova.virt.hardware [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1586.907726] env[67015]: DEBUG nova.virt.hardware [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1586.907907] env[67015]: DEBUG nova.virt.hardware [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1586.908158] env[67015]: DEBUG nova.virt.hardware [None 
req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1586.908344] env[67015]: DEBUG nova.virt.hardware [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1586.908522] env[67015]: DEBUG nova.virt.hardware [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1586.909372] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4aae130-25df-4f95-99d3-32cbd9bea080 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.918190] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed4abea-ec4f-42da-a13a-a643585f9244 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.312062] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5b03e291-1109-4d2c-afd7-be65f96edf4d tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Acquiring lock "f9cb0b88-053a-4a6d-be59-5aa202b6a4f7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1587.337187] env[67015]: DEBUG nova.network.neutron [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Successfully created port: 43d49a8a-8cc7-426a-844e-740def3b3975 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1587.916615] env[67015]: DEBUG nova.network.neutron [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Successfully updated port: 43d49a8a-8cc7-426a-844e-740def3b3975 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1587.927036] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Acquiring lock "refresh_cache-f9cb0b88-053a-4a6d-be59-5aa202b6a4f7" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1587.927189] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Acquired lock "refresh_cache-f9cb0b88-053a-4a6d-be59-5aa202b6a4f7" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1587.927338] env[67015]: DEBUG nova.network.neutron [None 
req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1587.971485] env[67015]: DEBUG nova.network.neutron [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1588.137153] env[67015]: DEBUG nova.network.neutron [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Updating instance_info_cache with network_info: [{"id": "43d49a8a-8cc7-426a-844e-740def3b3975", "address": "fa:16:3e:b0:c0:30", "network": {"id": "331c0c4f-5cb3-472a-9ee1-a11142e49d66", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1190499982-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f4adfa6681f4d0e983249b240d3e20c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43d49a8a-8c", "ovs_interfaceid": "43d49a8a-8cc7-426a-844e-740def3b3975", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1588.147283] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Releasing lock "refresh_cache-f9cb0b88-053a-4a6d-be59-5aa202b6a4f7" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1588.147605] env[67015]: DEBUG nova.compute.manager [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Instance network_info: |[{"id": "43d49a8a-8cc7-426a-844e-740def3b3975", "address": "fa:16:3e:b0:c0:30", "network": {"id": "331c0c4f-5cb3-472a-9ee1-a11142e49d66", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1190499982-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f4adfa6681f4d0e983249b240d3e20c", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43d49a8a-8c", "ovs_interfaceid": "43d49a8a-8cc7-426a-844e-740def3b3975", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1588.147965] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:c0:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0685bd0b-3dbf-4a06-951c-c6a4726dd4b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '43d49a8a-8cc7-426a-844e-740def3b3975', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1588.155481] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Creating folder: Project (6f4adfa6681f4d0e983249b240d3e20c). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1588.155985] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bda69bb1-b32d-4308-9019-a790e09fe7d4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.167965] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Created folder: Project (6f4adfa6681f4d0e983249b240d3e20c) in parent group-v623108. [ 1588.168153] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Creating folder: Instances. Parent ref: group-v623207. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1588.168388] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-29e423ba-d8d7-4d97-82e2-1b59b5c97c7a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.176964] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Created folder: Instances in parent group-v623207. [ 1588.177230] env[67015]: DEBUG oslo.service.loopingcall [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1588.177438] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1588.177634] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a952dc92-c334-4e08-bbb8-6682873ff19f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.195713] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1588.195713] env[67015]: value = "task-3114535" [ 1588.195713] env[67015]: _type = "Task" [ 1588.195713] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.206459] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114535, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.317066] env[67015]: DEBUG nova.compute.manager [req-c9980391-ded3-442e-a7c2-665af6f9f7d1 req-d00a1895-1ab5-43d4-a015-0ee03c3b50a9 service nova] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Received event network-vif-plugged-43d49a8a-8cc7-426a-844e-740def3b3975 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1588.317318] env[67015]: DEBUG oslo_concurrency.lockutils [req-c9980391-ded3-442e-a7c2-665af6f9f7d1 req-d00a1895-1ab5-43d4-a015-0ee03c3b50a9 service nova] Acquiring lock "f9cb0b88-053a-4a6d-be59-5aa202b6a4f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1588.317600] env[67015]: DEBUG oslo_concurrency.lockutils [req-c9980391-ded3-442e-a7c2-665af6f9f7d1 req-d00a1895-1ab5-43d4-a015-0ee03c3b50a9 service nova] Lock "f9cb0b88-053a-4a6d-be59-5aa202b6a4f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.317818] env[67015]: DEBUG oslo_concurrency.lockutils [req-c9980391-ded3-442e-a7c2-665af6f9f7d1 req-d00a1895-1ab5-43d4-a015-0ee03c3b50a9 service nova] Lock "f9cb0b88-053a-4a6d-be59-5aa202b6a4f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1588.318070] env[67015]: DEBUG nova.compute.manager [req-c9980391-ded3-442e-a7c2-665af6f9f7d1 req-d00a1895-1ab5-43d4-a015-0ee03c3b50a9 service nova] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] No waiting events found dispatching network-vif-plugged-43d49a8a-8cc7-426a-844e-740def3b3975 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1588.318270] env[67015]: WARNING nova.compute.manager [req-c9980391-ded3-442e-a7c2-665af6f9f7d1 req-d00a1895-1ab5-43d4-a015-0ee03c3b50a9 service nova] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Received unexpected event network-vif-plugged-43d49a8a-8cc7-426a-844e-740def3b3975 for instance with vm_state building and task_state deleting. 
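The "Waiting for the task: (returnval) ... progress is 0%" lines above come from oslo.vmware polling CreateVM_Task on a fixed interval until vCenter reports a terminal state. A simplified, self-contained sketch of that poll loop; get_task_info is an assumed callable standing in for the TaskInfo read, and this is not the library's actual implementation:

    from oslo_service import loopingcall

    def wait_for_vcenter_task(get_task_info, interval=0.5):
        # get_task_info: assumed callable returning an object with .state in
        # {'queued', 'running', 'success', 'error'} (and .error on failure),
        # mirroring the vSphere TaskInfo polled above.
        def _poll():
            info = get_task_info()
            if info.state == 'success':
                # Stop the loop; .wait() below returns this value.
                raise loopingcall.LoopingCallDone(getattr(info, 'result', None))
            if info.state == 'error':
                # oslo.vmware would instead raise the translated fault here.
                raise RuntimeError(info.error)
            # queued/running: fall through and poll again on the next tick

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        return timer.start(interval=interval).wait()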
[ 1588.318438] env[67015]: DEBUG nova.compute.manager [req-c9980391-ded3-442e-a7c2-665af6f9f7d1 req-d00a1895-1ab5-43d4-a015-0ee03c3b50a9 service nova] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Received event network-changed-43d49a8a-8cc7-426a-844e-740def3b3975 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1588.318593] env[67015]: DEBUG nova.compute.manager [req-c9980391-ded3-442e-a7c2-665af6f9f7d1 req-d00a1895-1ab5-43d4-a015-0ee03c3b50a9 service nova] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Refreshing instance network info cache due to event network-changed-43d49a8a-8cc7-426a-844e-740def3b3975. {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1588.318862] env[67015]: DEBUG oslo_concurrency.lockutils [req-c9980391-ded3-442e-a7c2-665af6f9f7d1 req-d00a1895-1ab5-43d4-a015-0ee03c3b50a9 service nova] Acquiring lock "refresh_cache-f9cb0b88-053a-4a6d-be59-5aa202b6a4f7" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1588.319062] env[67015]: DEBUG oslo_concurrency.lockutils [req-c9980391-ded3-442e-a7c2-665af6f9f7d1 req-d00a1895-1ab5-43d4-a015-0ee03c3b50a9 service nova] Acquired lock "refresh_cache-f9cb0b88-053a-4a6d-be59-5aa202b6a4f7" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1588.319243] env[67015]: DEBUG nova.network.neutron [req-c9980391-ded3-442e-a7c2-665af6f9f7d1 req-d00a1895-1ab5-43d4-a015-0ee03c3b50a9 service nova] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Refreshing network info cache for port 43d49a8a-8cc7-426a-844e-740def3b3975 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1588.589872] env[67015]: DEBUG nova.network.neutron [req-c9980391-ded3-442e-a7c2-665af6f9f7d1 req-d00a1895-1ab5-43d4-a015-0ee03c3b50a9 service nova] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Updated VIF entry in instance network info cache for port 43d49a8a-8cc7-426a-844e-740def3b3975. 
{{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1588.590231] env[67015]: DEBUG nova.network.neutron [req-c9980391-ded3-442e-a7c2-665af6f9f7d1 req-d00a1895-1ab5-43d4-a015-0ee03c3b50a9 service nova] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Updating instance_info_cache with network_info: [{"id": "43d49a8a-8cc7-426a-844e-740def3b3975", "address": "fa:16:3e:b0:c0:30", "network": {"id": "331c0c4f-5cb3-472a-9ee1-a11142e49d66", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1190499982-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f4adfa6681f4d0e983249b240d3e20c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43d49a8a-8c", "ovs_interfaceid": "43d49a8a-8cc7-426a-844e-740def3b3975", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1588.601075] env[67015]: DEBUG oslo_concurrency.lockutils [req-c9980391-ded3-442e-a7c2-665af6f9f7d1 req-d00a1895-1ab5-43d4-a015-0ee03c3b50a9 service nova] Releasing lock "refresh_cache-f9cb0b88-053a-4a6d-be59-5aa202b6a4f7" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1588.705505] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114535, 'name': CreateVM_Task, 'duration_secs': 0.30271} completed successfully. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.705666] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1588.706368] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1588.706538] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1588.706868] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1588.707122] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-117d8ddf-c1de-4918-bef5-1e1f84f32574 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.711311] env[67015]: DEBUG oslo_vmware.api [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Waiting for the task: (returnval){ [ 1588.711311] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]522427a3-109a-9474-5331-0e6c4fc0a8b6" [ 1588.711311] env[67015]: _type = "Task" [ 1588.711311] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.718537] env[67015]: DEBUG oslo_vmware.api [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]522427a3-109a-9474-5331-0e6c4fc0a8b6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.221549] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1589.221857] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1589.222010] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1596.243436] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "4d61f178-b532-4ddb-958f-68723d041497" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1596.243844] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "4d61f178-b532-4ddb-958f-68723d041497" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1609.514425] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1611.514861] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1612.510291] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1612.513934] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1614.514282] env[67015]: DEBUG oslo_service.periodic_task [None 
req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1615.514551] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1615.514880] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1615.526543] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.526757] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.526952] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.527130] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1615.528232] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d0e582-ba37-41b4-8813-8fff23fa99ae {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.537138] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b64c4346-f7a9-4b57-bde9-c429e52fac01 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.550643] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c77b8684-e0fc-4b32-8a2e-8ed9b7eced32 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.556614] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95f85af-4b02-4a37-84f9-285161f8d9ad {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.584963] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180975MB free_disk=135GB free_vcpus=48 pci_devices=None 
{{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1615.585197] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.585487] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.657490] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 7b744243-c7e5-4253-9273-9d7f84772d96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1615.657649] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 7a5c2a57-b28d-45e0-ab7b-5a649758b69b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1615.657777] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance c7d8b42d-6455-4489-9f62-8ab9f85e7f76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1615.657907] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1615.658084] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6d104e2f-9924-4094-823d-a78c21acfc7b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1615.658219] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 944728f0-7db6-4cca-a51c-7acb5998cb12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1615.658338] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 41f47735-f679-4b30-8e30-f917dcf4db42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1615.658454] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1615.658569] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 199b0508-5b88-41b4-ae08-dcdabb656686 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1615.658681] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance f9cb0b88-053a-4a6d-be59-5aa202b6a4f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1615.669540] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 210c7815-0a29-47a6-a4c0-5e7f9ca5abf2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1615.679649] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance f7de465c-7557-41d0-b71a-ad0872c93745 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1615.689327] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 4d61f178-b532-4ddb-958f-68723d041497 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1615.689541] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1615.689690] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1615.841203] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ab108ed-a2fb-4220-8f9d-fbbff17d227e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.848310] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0587c5b6-47cd-400b-906e-1ba55a8fed30 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.876901] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d846e30b-9a86-42a2-aa96-07647aea547b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.883593] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6136f623-5edb-4633-a9ee-c0d8ed66f2fb {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.895769] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1615.904405] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1615.918771] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1615.918946] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.334s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1617.914454] env[67015]: DEBUG oslo_service.periodic_task [None 
req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1618.513709] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1618.514825] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1619.884370] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Acquiring lock "6daf9c76-9471-43ec-9dd6-aaa43efc391b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1619.884754] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Lock "6daf9c76-9471-43ec-9dd6-aaa43efc391b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1621.514617] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1621.514972] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1621.514972] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1621.536848] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1621.537044] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1621.537133] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Skipping network cache update for instance because it is Building. 
[ 1621.537257] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1621.537375] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1621.537492] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1621.537609] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1621.537729] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1621.537853] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1621.537968] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1621.538099] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}}
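The heal pass above walks every instance on the host but skips the ones still in the Building state, since their network info is not stable yet. A condensed sketch of that filter (assumed attribute names, not the actual ComputeManager code):

    BUILDING = "building"

    # Condensed, illustrative version of the skip logic logged above.
    def instances_to_heal(instances):
        for inst in instances:
            if inst.vm_state == BUILDING:
                # "Skipping network cache update for instance because it is Building."
                continue
            yield inst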
[ 1631.000166] env[67015]: WARNING oslo_vmware.rw_handles [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1631.000166] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1631.000166] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1631.000166] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 1631.000166] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1631.000166] env[67015]: ERROR oslo_vmware.rw_handles response.begin()
[ 1631.000166] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1631.000166] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 1631.000166] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1631.000166] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 1631.000166] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1631.000166] env[67015]: ERROR oslo_vmware.rw_handles
[ 1631.001129] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/ca12125f-46c1-449a-9e3d-8b4cbe772e62/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1631.003292] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1631.003532] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Copying Virtual Disk [datastore2] vmware_temp/ca12125f-46c1-449a-9e3d-8b4cbe772e62/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/ca12125f-46c1-449a-9e3d-8b4cbe772e62/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1631.003825] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ecee32a1-02a9-4c08-9018-5b1930061d51 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
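The fetch above lands the Glance image in a per-request temp directory on the datastore and then copies the sparse VMDK to its cached name. A small path sketch of the layout visible in the log (hypothetical helper, not vmops code):

    # Hypothetical helper mirroring the datastore paths in the entries above.
    def image_cache_paths(datastore, request_dir, image_id):
        base = f"[{datastore}] vmware_temp/{request_dir}/{image_id}"
        tmp_sparse = f"{base}/tmp-sparse.vmdk"  # download target
        cached = f"{base}/{image_id}.vmdk"      # CopyVirtualDisk destination
        return tmp_sparse, cached

    print(image_cache_paths("datastore2",
                            "ca12125f-46c1-449a-9e3d-8b4cbe772e62",
                            "8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982")[0])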
[ 1631.011346] env[67015]: DEBUG oslo_vmware.api [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Waiting for the task: (returnval){
[ 1631.011346] env[67015]: value = "task-3114536"
[ 1631.011346] env[67015]: _type = "Task"
[ 1631.011346] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1631.018743] env[67015]: DEBUG oslo_vmware.api [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Task: {'id': task-3114536, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1631.521841] env[67015]: DEBUG oslo_vmware.exceptions [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Fault InvalidArgument not matched. {{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1631.522209] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
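The "Waiting for the task" / "progress is N%" pairs throughout this log come from a polling loop around the vCenter task object. Roughly (illustrative, not oslo.vmware's implementation):

    import time

    # Illustrative polling loop: fetch task info until a terminal state.
    def wait_for_task(get_task_info, interval=0.5):
        while True:
            info = get_task_info()            # each poll logs "progress is N%."
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                # oslo.vmware translates this into a Vim fault exception
                raise RuntimeError(info["error"])
            time.sleep(interval)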
[ 1631.522804] env[67015]: ERROR nova.compute.manager [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1631.522804] env[67015]: Faults: ['InvalidArgument']
[ 1631.522804] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Traceback (most recent call last):
[ 1631.522804] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1631.522804] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] yield resources
[ 1631.522804] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1631.522804] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] self.driver.spawn(context, instance, image_meta,
[ 1631.522804] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1631.522804] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1631.522804] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1631.522804] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] self._fetch_image_if_missing(context, vi)
[ 1631.522804] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1631.523338] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] image_cache(vi, tmp_image_ds_loc)
[ 1631.523338] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1631.523338] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] vm_util.copy_virtual_disk(
[ 1631.523338] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1631.523338] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] session._wait_for_task(vmdk_copy_task)
[ 1631.523338] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1631.523338] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] return self.wait_for_task(task_ref)
[ 1631.523338] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1631.523338] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] return evt.wait()
[ 1631.523338] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1631.523338] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] result = hub.switch()
[ 1631.523338] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1631.523338] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] return self.greenlet.switch()
[ 1631.523849] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1631.523849] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] self.f(*self.args, **self.kw)
[ 1631.523849] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1631.523849] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] raise exceptions.translate_fault(task_info.error)
[ 1631.523849] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1631.523849] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Faults: ['InvalidArgument']
[ 1631.523849] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96]
[ 1631.523849] env[67015]: INFO nova.compute.manager [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Terminating instance
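The CopyVirtualDisk_Task failure surfaces as a VimFaultException whose fault names drive the handling; the earlier "Fault InvalidArgument not matched." entry is the fault-to-exception-class lookup falling through to the generic type. A minimal handling sketch (assuming the exception's fault_list attribute, which is what produces the "Faults: ['InvalidArgument']" lines, and not Nova's actual code):

    from oslo_vmware import exceptions as vexc

    # Minimal, illustrative sketch: branch on the fault names carried by the
    # exception rather than parsing its message text.
    def spawn_logged(spawn):
        try:
            spawn()
        except vexc.VimFaultException as e:
            print(f"spawn failed, faults={e.fault_list}: {e}")
            raise  # the manager aborts the claim and re-schedules (below)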
[ 1631.524767] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1631.524961] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1631.525215] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-49eb9e0c-3678-4f12-b850-fe960de599e8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1631.527652] env[67015]: DEBUG nova.compute.manager [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1631.527867] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1631.528598] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec5e00e-4252-4ec8-b1f1-7bf5960ce8fc {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1631.535341] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1631.535552] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-32d5b5aa-e565-44d9-9a57-18ff874574ff {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1631.540679] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1631.540857] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
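Interleaved with the teardown, a second request (req-ff5edfe5) prepares the shared image cache; creating the cache folder is idempotent, so racing requests only need to tolerate "already exists". A sketch of that pattern (names assumed, not the vmops code):

    class FileAlreadyExists(Exception):
        """Stand-in for the datastore's 'directory exists' fault."""

    # Create-if-missing: safe to call from any number of racing requests.
    def create_folder_if_missing(mkdir, path):
        try:
            mkdir(path)              # FileManager.MakeDirectory in the log
        except FileAlreadyExists:
            pass                     # another request already created it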
[ 1631.541547] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3428184-a934-4b62-9d05-0b31155bba3d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1631.546291] env[67015]: DEBUG oslo_vmware.api [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Waiting for the task: (returnval){
[ 1631.546291] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]525401fe-6d6b-4f2e-cc7c-8bceb577b494"
[ 1631.546291] env[67015]: _type = "Task"
[ 1631.546291] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1631.553103] env[67015]: DEBUG oslo_vmware.api [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]525401fe-6d6b-4f2e-cc7c-8bceb577b494, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1631.602393] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1631.602599] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1631.602775] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Deleting the datastore file [datastore2] 7b744243-c7e5-4253-9273-9d7f84772d96 {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1631.603040] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-43065e85-1283-431e-8b77-ca75fe9b54e8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1631.609131] env[67015]: DEBUG oslo_vmware.api [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Waiting for the task: (returnval){
[ 1631.609131] env[67015]: value = "task-3114538"
[ 1631.609131] env[67015]: _type = "Task"
[ 1631.609131] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1631.616275] env[67015]: DEBUG oslo_vmware.api [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Task: {'id': task-3114538, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1632.057531] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1632.057825] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Creating directory with path [datastore2] vmware_temp/063f8025-90b9-4570-99d0-42661aa80b35/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1632.058126] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9bf5c4d5-1aa1-4dd8-9c22-c637e029592a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1632.069351] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Created directory with path [datastore2] vmware_temp/063f8025-90b9-4570-99d0-42661aa80b35/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1632.069534] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Fetch image to [datastore2] vmware_temp/063f8025-90b9-4570-99d0-42661aa80b35/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1632.069702] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/063f8025-90b9-4570-99d0-42661aa80b35/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1632.070467] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-737e11e9-e3f5-4aec-a749-17c829ec8510 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1632.076881] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac55edb-d992-4268-94c0-16d7667bb5ee {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1632.085508] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de41cd32-4476-420d-b821-bfa6ce982d8f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1632.118480] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f34cc27f-8dc0-4c78-b9b3-9376740ab789 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1632.125453] env[67015]: DEBUG oslo_vmware.api [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Task: {'id': task-3114538, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.087595} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1632.126769] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1632.126960] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1632.127146] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1632.127320] env[67015]: INFO nova.compute.manager [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Took 0.60 seconds to destroy the instance on the hypervisor.
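The destroy path above runs in a fixed order: unregister the VM from vCenter, delete its datastore directory, then report the instance destroyed. Condensed shape (illustrative only, not the vmops method bodies):

    # Condensed, illustrative shape of the teardown sequence in the log.
    def destroy_on_hypervisor(vm, file_manager):
        vm.unregister()                          # VirtualMachine.UnregisterVM
        file_manager.delete(vm.datastore_path)   # DeleteDatastoreFile_Task
        # only after both steps is "Instance destroyed" logged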
[ 1632.129050] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b2b4723b-6e3d-4fbd-ab9b-53871dba5da2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1632.130821] env[67015]: DEBUG nova.compute.claims [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1632.130997] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1632.131230] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1632.154625] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1632.207722] env[67015]: DEBUG oslo_vmware.rw_handles [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/063f8025-90b9-4570-99d0-42661aa80b35/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1632.268512] env[67015]: DEBUG oslo_vmware.rw_handles [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1632.268713] env[67015]: DEBUG oslo_vmware.rw_handles [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/063f8025-90b9-4570-99d0-42661aa80b35/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
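The write handle above streams the image iterator into an HTTP PUT against the ESX datastore URL, with Content-Length taken from the image size (21318656 here). A rough standalone sketch of the idea (the real oslo_vmware.rw_handles code additionally handles auth tickets, TLS options, and error translation):

    import http.client
    import urllib.parse

    # Rough, illustrative datastore upload: stream chunks over a single PUT.
    def upload(url, data_iter, size):
        u = urllib.parse.urlsplit(url)
        target = u.path + ("?" + u.query if u.query else "")
        conn = http.client.HTTPSConnection(u.hostname, u.port or 443)
        conn.putrequest("PUT", target)
        conn.putheader("Content-Length", str(size))
        conn.endheaders()
        for chunk in data_iter:       # "Completed reading data ..." marks EOF
            conn.send(chunk)
        resp = conn.getresponse()     # the earlier RemoteDisconnected warning
        conn.close()                  # came from this step during close()
        return resp.status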
[ 1632.399033] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-799adf05-19b5-40d6-ae3c-820c225249de {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1632.407127] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63391e4-5a70-46b9-b559-589f337ea7b1 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1632.435513] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d88ffb-ebca-4cc5-8c89-36ea8515d3ce {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1632.442332] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ea6fc68-e184-4358-a0b3-700b8e0942c0 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1632.455738] env[67015]: DEBUG nova.compute.provider_tree [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1632.464777] env[67015]: DEBUG nova.scheduler.client.report [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1632.479077] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.348s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
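What follows is the same fileType fault propagating out of _build_and_run_instance. The manager's recovery, visible over the next entries, is to record the fault, unplug and deallocate networking, and hand the request back to the scheduler. Condensed control flow (illustrative, names abbreviated from nova.compute.manager):

    # Condensed, illustrative control flow of the re-schedule path (not the
    # real method bodies from nova.compute.manager):
    def do_build_and_run(instance, spawn, cleanup_networks, reschedule):
        try:
            spawn(instance)              # fails with the Vim fault here
        except Exception as exc:
            cleanup_networks(instance)   # "Unplugging VIFs" / "Deallocating network"
            reschedule(instance, exc)    # "Build of instance ... was re-scheduled"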
[ 1632.479597] env[67015]: ERROR nova.compute.manager [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1632.479597] env[67015]: Faults: ['InvalidArgument']
[ 1632.479597] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Traceback (most recent call last):
[ 1632.479597] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1632.479597] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] self.driver.spawn(context, instance, image_meta,
[ 1632.479597] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1632.479597] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1632.479597] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1632.479597] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] self._fetch_image_if_missing(context, vi)
[ 1632.479597] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1632.479597] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] image_cache(vi, tmp_image_ds_loc)
[ 1632.479597] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1632.480014] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] vm_util.copy_virtual_disk(
[ 1632.480014] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1632.480014] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] session._wait_for_task(vmdk_copy_task)
[ 1632.480014] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1632.480014] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] return self.wait_for_task(task_ref)
[ 1632.480014] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1632.480014] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] return evt.wait()
[ 1632.480014] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1632.480014] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] result = hub.switch()
[ 1632.480014] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1632.480014] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] return self.greenlet.switch()
[ 1632.480014] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1632.480014] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] self.f(*self.args, **self.kw)
[ 1632.480393] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1632.480393] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] raise exceptions.translate_fault(task_info.error)
[ 1632.480393] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1632.480393] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Faults: ['InvalidArgument']
[ 1632.480393] env[67015]: ERROR nova.compute.manager [instance: 7b744243-c7e5-4253-9273-9d7f84772d96]
[ 1632.480393] env[67015]: DEBUG nova.compute.utils [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1632.481734] env[67015]: DEBUG nova.compute.manager [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Build of instance 7b744243-c7e5-4253-9273-9d7f84772d96 was re-scheduled: A specified parameter was not correct: fileType
[ 1632.481734] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1632.482115] env[67015]: DEBUG nova.compute.manager [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1632.482295] env[67015]: DEBUG nova.compute.manager [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 1632.482466] env[67015]: DEBUG nova.compute.manager [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1632.482633] env[67015]: DEBUG nova.network.neutron [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1632.871575] env[67015]: DEBUG nova.network.neutron [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1632.884207] env[67015]: INFO nova.compute.manager [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Took 0.40 seconds to deallocate network for instance.
[ 1632.978863] env[67015]: INFO nova.scheduler.client.report [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Deleted allocations for instance 7b744243-c7e5-4253-9273-9d7f84772d96
[ 1632.999733] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7be69feb-a1ba-4e4c-bca8-bfea3b554118 tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Lock "7b744243-c7e5-4253-9273-9d7f84772d96" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 619.777s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1633.001013] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9f8574ad-8d0f-4beb-89c3-2c1a65e66fcb tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Lock "7b744243-c7e5-4253-9273-9d7f84772d96" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 423.569s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1633.001257] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9f8574ad-8d0f-4beb-89c3-2c1a65e66fcb tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Acquiring lock "7b744243-c7e5-4253-9273-9d7f84772d96-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1633.001463] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9f8574ad-8d0f-4beb-89c3-2c1a65e66fcb tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Lock "7b744243-c7e5-4253-9273-9d7f84772d96-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1633.001636] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9f8574ad-8d0f-4beb-89c3-2c1a65e66fcb tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Lock "7b744243-c7e5-4253-9273-9d7f84772d96-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1633.003593] env[67015]: INFO nova.compute.manager [None req-9f8574ad-8d0f-4beb-89c3-2c1a65e66fcb tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Terminating instance
[ 1633.005303] env[67015]: DEBUG nova.compute.manager [None req-9f8574ad-8d0f-4beb-89c3-2c1a65e66fcb tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1633.005496] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-9f8574ad-8d0f-4beb-89c3-2c1a65e66fcb tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1633.005959] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7e4d22f1-dab2-4cee-a974-7056547fdd36 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1633.012508] env[67015]: DEBUG nova.compute.manager [None req-d5f58724-5684-46f0-9573-5ec08310db46 tempest-ServersListShow296Test-2116193991 tempest-ServersListShow296Test-2116193991-project-member] [instance: 03d60eff-5f68-4c04-83e4-926c0ac3103a] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1633.017625] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d81915-4e54-483f-8942-4959776dc81b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1633.046185] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-9f8574ad-8d0f-4beb-89c3-2c1a65e66fcb tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7b744243-c7e5-4253-9273-9d7f84772d96 could not be found.
[ 1633.046387] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-9f8574ad-8d0f-4beb-89c3-2c1a65e66fcb tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1633.046562] env[67015]: INFO nova.compute.manager [None req-9f8574ad-8d0f-4beb-89c3-2c1a65e66fcb tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 1633.046799] env[67015]: DEBUG oslo.service.loopingcall [None req-9f8574ad-8d0f-4beb-89c3-2c1a65e66fcb tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
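Note the destroy path tolerating a VM that has already vanished from the backend: InstanceNotFound is swallowed and the teardown proceeds, so repeated deletes converge. A sketch of the pattern (stand-in exception type, not Nova code):

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy(instance, backend_destroy):
        try:
            backend_destroy(instance)
        except InstanceNotFound:
            pass   # logged above as "Instance does not exist on backend"
        # network deallocation and allocation cleanup still run afterwards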
[ 1633.047204] env[67015]: DEBUG nova.compute.manager [None req-d5f58724-5684-46f0-9573-5ec08310db46 tempest-ServersListShow296Test-2116193991 tempest-ServersListShow296Test-2116193991-project-member] [instance: 03d60eff-5f68-4c04-83e4-926c0ac3103a] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 1633.048031] env[67015]: DEBUG nova.compute.manager [-] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1633.048138] env[67015]: DEBUG nova.network.neutron [-] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1633.069585] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d5f58724-5684-46f0-9573-5ec08310db46 tempest-ServersListShow296Test-2116193991 tempest-ServersListShow296Test-2116193991-project-member] Lock "03d60eff-5f68-4c04-83e4-926c0ac3103a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 222.310s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1633.071218] env[67015]: DEBUG nova.network.neutron [-] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1633.077934] env[67015]: DEBUG nova.compute.manager [None req-78af0ae1-30e7-42a2-a8a8-0691615ba325 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 210c7815-0a29-47a6-a4c0-5e7f9ca5abf2] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1633.080517] env[67015]: INFO nova.compute.manager [-] [instance: 7b744243-c7e5-4253-9273-9d7f84772d96] Took 0.03 seconds to deallocate network for instance.
[ 1633.099062] env[67015]: DEBUG nova.compute.manager [None req-78af0ae1-30e7-42a2-a8a8-0691615ba325 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 210c7815-0a29-47a6-a4c0-5e7f9ca5abf2] Instance disappeared before build. {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 1633.119946] env[67015]: DEBUG oslo_concurrency.lockutils [None req-78af0ae1-30e7-42a2-a8a8-0691615ba325 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Lock "210c7815-0a29-47a6-a4c0-5e7f9ca5abf2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 210.805s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1633.130843] env[67015]: DEBUG nova.compute.manager [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1633.167315] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9f8574ad-8d0f-4beb-89c3-2c1a65e66fcb tempest-ServerActionsTestOtherB-1508818736 tempest-ServerActionsTestOtherB-1508818736-project-member] Lock "7b744243-c7e5-4253-9273-9d7f84772d96" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.166s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1633.185899] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1633.186156] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1633.187548] env[67015]: INFO nova.compute.claims [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1633.358662] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187d8b31-635d-4726-a8ae-214f96c34f5c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1633.366370] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4afa1814-bb6d-4abb-ad04-480493316400 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1633.395222] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb8ba4ce-ff30-4062-a525-c15e5b3dc951 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1633.401867] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c50de2-c1fd-4102-b047-332be80db765 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
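The instance_claim here is the mirror image of the abort_instance_claim seen earlier: resources are reserved under the compute_resources lock before spawn, and given back if the build fails. A condensed sketch of the pattern (illustrative, not the ResourceTracker API):

    import threading

    # Condensed, illustrative claim/abort pattern: reserve under a lock,
    # give the resources back on failure.
    _lock = threading.Lock()

    def build_with_claim(claim, abort, spawn):
        with _lock:                # "Acquiring lock 'compute_resources' ..."
            claim()                # "Claim successful on node ..."
        try:
            spawn()
        except Exception:
            with _lock:
                abort()            # "Aborting claim" earlier in the log
            raise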
[ 1633.414327] env[67015]: DEBUG nova.compute.provider_tree [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1633.423063] env[67015]: DEBUG nova.scheduler.client.report [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1633.436527] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.250s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1633.437035] env[67015]: DEBUG nova.compute.manager [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 1633.469323] env[67015]: DEBUG nova.compute.utils [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1633.470730] env[67015]: DEBUG nova.compute.manager [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 1633.470917] env[67015]: DEBUG nova.network.neutron [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1633.478941] env[67015]: DEBUG nova.compute.manager [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 1633.529490] env[67015]: DEBUG nova.policy [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '585aaf69bf474881ba8f6f13b7fba1b2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '54ce93521ed148d397c6dd8905557b34', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1633.540626] env[67015]: DEBUG nova.compute.manager [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Start spawning the instance on the hypervisor. {{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
[ 1633.565823] env[67015]: DEBUG nova.virt.hardware [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1633.566081] env[67015]: DEBUG nova.virt.hardware [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1633.566247] env[67015]: DEBUG nova.virt.hardware [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1633.566427] env[67015]: DEBUG nova.virt.hardware [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1633.566573] env[67015]: DEBUG nova.virt.hardware [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
tempest-ImagesTestJSON-284644961-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1633.566923] env[67015]: DEBUG nova.virt.hardware [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1633.567101] env[67015]: DEBUG nova.virt.hardware [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1633.567269] env[67015]: DEBUG nova.virt.hardware [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1633.567428] env[67015]: DEBUG nova.virt.hardware [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1633.567599] env[67015]: DEBUG nova.virt.hardware [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1633.568486] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f073c89d-bfa4-49b5-8716-84b12866bb45 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.577524] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a004a694-a744-4473-904f-ae7630563405 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.858889] env[67015]: DEBUG nova.network.neutron [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Successfully created port: 0018e0ed-fec9-48cd-a93a-85bcb48bbd6c {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1634.419117] env[67015]: DEBUG nova.network.neutron [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Successfully updated port: 0018e0ed-fec9-48cd-a93a-85bcb48bbd6c {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1634.430268] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquiring lock "refresh_cache-f7de465c-7557-41d0-b71a-ad0872c93745" {{(pid=67015) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1634.430406] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquired lock "refresh_cache-f7de465c-7557-41d0-b71a-ad0872c93745" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1634.430555] env[67015]: DEBUG nova.network.neutron [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1634.467709] env[67015]: DEBUG nova.network.neutron [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1634.623184] env[67015]: DEBUG nova.network.neutron [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Updating instance_info_cache with network_info: [{"id": "0018e0ed-fec9-48cd-a93a-85bcb48bbd6c", "address": "fa:16:3e:9e:e0:aa", "network": {"id": "a883b9e1-0130-4802-81b3-5c676e3f49c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-4227508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54ce93521ed148d397c6dd8905557b34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0018e0ed-fe", "ovs_interfaceid": "0018e0ed-fec9-48cd-a93a-85bcb48bbd6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1634.636622] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Releasing lock "refresh_cache-f7de465c-7557-41d0-b71a-ad0872c93745" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1634.636894] env[67015]: DEBUG nova.compute.manager [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Instance network_info: |[{"id": "0018e0ed-fec9-48cd-a93a-85bcb48bbd6c", "address": "fa:16:3e:9e:e0:aa", "network": {"id": "a883b9e1-0130-4802-81b3-5c676e3f49c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-4227508-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54ce93521ed148d397c6dd8905557b34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0018e0ed-fe", "ovs_interfaceid": "0018e0ed-fec9-48cd-a93a-85bcb48bbd6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1634.637327] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:e0:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b98c49ac-0eb7-4311-aa8f-60581b2ce706', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0018e0ed-fec9-48cd-a93a-85bcb48bbd6c', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1634.644642] env[67015]: DEBUG oslo.service.loopingcall [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1634.645075] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1634.645296] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e1076996-5261-4123-b83b-cc6d7bcd5467 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.665059] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1634.665059] env[67015]: value = "task-3114539" [ 1634.665059] env[67015]: _type = "Task" [ 1634.665059] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.675058] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114539, 'name': CreateVM_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.986979] env[67015]: DEBUG nova.compute.manager [req-f4466d8d-1a1c-420a-85d0-fc5a41c87d56 req-7289d6fd-859b-4489-bc26-821c1a95ee48 service nova] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Received event network-vif-plugged-0018e0ed-fec9-48cd-a93a-85bcb48bbd6c {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1634.987223] env[67015]: DEBUG oslo_concurrency.lockutils [req-f4466d8d-1a1c-420a-85d0-fc5a41c87d56 req-7289d6fd-859b-4489-bc26-821c1a95ee48 service nova] Acquiring lock "f7de465c-7557-41d0-b71a-ad0872c93745-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1634.987674] env[67015]: DEBUG oslo_concurrency.lockutils [req-f4466d8d-1a1c-420a-85d0-fc5a41c87d56 req-7289d6fd-859b-4489-bc26-821c1a95ee48 service nova] Lock "f7de465c-7557-41d0-b71a-ad0872c93745-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1634.987857] env[67015]: DEBUG oslo_concurrency.lockutils [req-f4466d8d-1a1c-420a-85d0-fc5a41c87d56 req-7289d6fd-859b-4489-bc26-821c1a95ee48 service nova] Lock "f7de465c-7557-41d0-b71a-ad0872c93745-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1634.988115] env[67015]: DEBUG nova.compute.manager [req-f4466d8d-1a1c-420a-85d0-fc5a41c87d56 req-7289d6fd-859b-4489-bc26-821c1a95ee48 service nova] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] No waiting events found dispatching network-vif-plugged-0018e0ed-fec9-48cd-a93a-85bcb48bbd6c {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1634.988294] env[67015]: WARNING nova.compute.manager [req-f4466d8d-1a1c-420a-85d0-fc5a41c87d56 req-7289d6fd-859b-4489-bc26-821c1a95ee48 service nova] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Received unexpected event network-vif-plugged-0018e0ed-fec9-48cd-a93a-85bcb48bbd6c for instance with vm_state building and task_state spawning. [ 1634.988523] env[67015]: DEBUG nova.compute.manager [req-f4466d8d-1a1c-420a-85d0-fc5a41c87d56 req-7289d6fd-859b-4489-bc26-821c1a95ee48 service nova] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Received event network-changed-0018e0ed-fec9-48cd-a93a-85bcb48bbd6c {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1634.988617] env[67015]: DEBUG nova.compute.manager [req-f4466d8d-1a1c-420a-85d0-fc5a41c87d56 req-7289d6fd-859b-4489-bc26-821c1a95ee48 service nova] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Refreshing instance network info cache due to event network-changed-0018e0ed-fec9-48cd-a93a-85bcb48bbd6c. 
{{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1634.988795] env[67015]: DEBUG oslo_concurrency.lockutils [req-f4466d8d-1a1c-420a-85d0-fc5a41c87d56 req-7289d6fd-859b-4489-bc26-821c1a95ee48 service nova] Acquiring lock "refresh_cache-f7de465c-7557-41d0-b71a-ad0872c93745" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1634.988928] env[67015]: DEBUG oslo_concurrency.lockutils [req-f4466d8d-1a1c-420a-85d0-fc5a41c87d56 req-7289d6fd-859b-4489-bc26-821c1a95ee48 service nova] Acquired lock "refresh_cache-f7de465c-7557-41d0-b71a-ad0872c93745" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1634.989093] env[67015]: DEBUG nova.network.neutron [req-f4466d8d-1a1c-420a-85d0-fc5a41c87d56 req-7289d6fd-859b-4489-bc26-821c1a95ee48 service nova] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Refreshing network info cache for port 0018e0ed-fec9-48cd-a93a-85bcb48bbd6c {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1635.175014] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114539, 'name': CreateVM_Task, 'duration_secs': 0.281692} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.175195] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1635.175841] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1635.176013] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1635.176324] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1635.176562] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36b00192-bc33-49df-94c5-1b93c0ef1259 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.180805] env[67015]: DEBUG oslo_vmware.api [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Waiting for the task: (returnval){ [ 1635.180805] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52778e28-b4b7-6629-2abf-b108e76f07e1" [ 1635.180805] env[67015]: _type = "Task" [ 1635.180805] env[67015]: } to complete. 
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.187857] env[67015]: DEBUG oslo_vmware.api [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52778e28-b4b7-6629-2abf-b108e76f07e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.246396] env[67015]: DEBUG nova.network.neutron [req-f4466d8d-1a1c-420a-85d0-fc5a41c87d56 req-7289d6fd-859b-4489-bc26-821c1a95ee48 service nova] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Updated VIF entry in instance network info cache for port 0018e0ed-fec9-48cd-a93a-85bcb48bbd6c. {{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1635.246736] env[67015]: DEBUG nova.network.neutron [req-f4466d8d-1a1c-420a-85d0-fc5a41c87d56 req-7289d6fd-859b-4489-bc26-821c1a95ee48 service nova] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Updating instance_info_cache with network_info: [{"id": "0018e0ed-fec9-48cd-a93a-85bcb48bbd6c", "address": "fa:16:3e:9e:e0:aa", "network": {"id": "a883b9e1-0130-4802-81b3-5c676e3f49c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-4227508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54ce93521ed148d397c6dd8905557b34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0018e0ed-fe", "ovs_interfaceid": "0018e0ed-fec9-48cd-a93a-85bcb48bbd6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1635.255797] env[67015]: DEBUG oslo_concurrency.lockutils [req-f4466d8d-1a1c-420a-85d0-fc5a41c87d56 req-7289d6fd-859b-4489-bc26-821c1a95ee48 service nova] Releasing lock "refresh_cache-f7de465c-7557-41d0-b71a-ad0872c93745" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1635.692451] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1635.692817] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1635.692861] env[67015]: DEBUG 
oslo_concurrency.lockutils [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1671.514123] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1672.509571] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1672.514476] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1672.514476] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1675.515800] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1675.528981] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.529223] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1675.529398] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1675.529566] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1675.530699] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e2e1f35-7998-424e-855f-cafd49385a33 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.539281] env[67015]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8c59182-125b-4107-bcbc-79f1e8f1cd19 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.553354] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c58dc036-0ecd-48fc-9ee6-c7c7de11dbee {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.559383] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc441f5-0761-4880-9a78-e6587e262ef1 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.587769] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181053MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1675.587937] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.588122] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1675.662704] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 7a5c2a57-b28d-45e0-ab7b-5a649758b69b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1675.662913] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance c7d8b42d-6455-4489-9f62-8ab9f85e7f76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1675.663091] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1675.663266] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6d104e2f-9924-4094-823d-a78c21acfc7b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1675.663426] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 944728f0-7db6-4cca-a51c-7acb5998cb12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1675.663576] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 41f47735-f679-4b30-8e30-f917dcf4db42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1675.663726] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1675.663871] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 199b0508-5b88-41b4-ae08-dcdabb656686 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1675.664025] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance f9cb0b88-053a-4a6d-be59-5aa202b6a4f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1675.664182] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance f7de465c-7557-41d0-b71a-ad0872c93745 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1675.675056] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 4d61f178-b532-4ddb-958f-68723d041497 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1675.685680] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6daf9c76-9471-43ec-9dd6-aaa43efc391b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1675.685849] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1675.685967] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1675.831968] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7914a4-3f1d-46c1-bf51-98fade540336 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.839961] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c6816b-91c8-467c-88b5-fdee633c3d6c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.869836] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46241e72-84d5-4204-b811-196f0cfd353c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.876922] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c077ed0-d7dd-4ba4-bbb0-a184de648d0e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.889420] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1675.897594] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1675.911059] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1675.911059] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.323s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1676.910030] env[67015]: DEBUG oslo_service.periodic_task [None 
req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1676.910462] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1680.514444] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1680.514779] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1681.179183] env[67015]: WARNING oslo_vmware.rw_handles [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1681.179183] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1681.179183] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1681.179183] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1681.179183] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1681.179183] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 1681.179183] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1681.179183] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1681.179183] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1681.179183] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1681.179183] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1681.179183] env[67015]: ERROR oslo_vmware.rw_handles [ 1681.179598] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/063f8025-90b9-4570-99d0-42661aa80b35/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1681.182000] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1681.182310] env[67015]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Copying Virtual Disk [datastore2] vmware_temp/063f8025-90b9-4570-99d0-42661aa80b35/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/063f8025-90b9-4570-99d0-42661aa80b35/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1681.182629] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7982c2d2-2bd6-465e-b8e4-62a1d0bee6d2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.191428] env[67015]: DEBUG oslo_vmware.api [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Waiting for the task: (returnval){ [ 1681.191428] env[67015]: value = "task-3114540" [ 1681.191428] env[67015]: _type = "Task" [ 1681.191428] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.199351] env[67015]: DEBUG oslo_vmware.api [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Task: {'id': task-3114540, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.702654] env[67015]: DEBUG oslo_vmware.exceptions [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Fault InvalidArgument not matched. 
{{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1681.702932] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1681.703475] env[67015]: ERROR nova.compute.manager [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1681.703475] env[67015]: Faults: ['InvalidArgument'] [ 1681.703475] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Traceback (most recent call last): [ 1681.703475] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1681.703475] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] yield resources [ 1681.703475] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1681.703475] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] self.driver.spawn(context, instance, image_meta, [ 1681.703475] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1681.703475] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1681.703475] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1681.703475] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] self._fetch_image_if_missing(context, vi) [ 1681.703475] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1681.704031] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] image_cache(vi, tmp_image_ds_loc) [ 1681.704031] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1681.704031] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] vm_util.copy_virtual_disk( [ 1681.704031] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1681.704031] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] session._wait_for_task(vmdk_copy_task) [ 1681.704031] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1681.704031] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] return self.wait_for_task(task_ref) [ 1681.704031] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1681.704031] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] return evt.wait() [ 1681.704031] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1681.704031] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] result = hub.switch() [ 1681.704031] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1681.704031] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] return self.greenlet.switch() [ 1681.704437] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1681.704437] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] self.f(*self.args, **self.kw) [ 1681.704437] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1681.704437] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] raise exceptions.translate_fault(task_info.error) [ 1681.704437] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1681.704437] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Faults: ['InvalidArgument'] [ 1681.704437] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] [ 1681.704437] env[67015]: INFO nova.compute.manager [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Terminating instance [ 1681.705315] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1681.705519] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1681.705749] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-767963bc-540f-403d-9278-ad266f66b44f {{(pid=67015) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.707855] env[67015]: DEBUG nova.compute.manager [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1681.708060] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1681.708790] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec2873a-566f-43aa-a949-712f0367c805 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.715605] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1681.715784] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-df5bdbc4-de75-4b11-95dd-ba16b0072100 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.717978] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1681.718164] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1681.719082] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d97a0ca-de9b-4b1a-af4c-650598cf15a9 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.723620] env[67015]: DEBUG oslo_vmware.api [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Waiting for the task: (returnval){ [ 1681.723620] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52fd9be8-6b21-d685-3b73-6c703d2b1bdb" [ 1681.723620] env[67015]: _type = "Task" [ 1681.723620] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.732576] env[67015]: DEBUG oslo_vmware.api [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52fd9be8-6b21-d685-3b73-6c703d2b1bdb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.779435] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1681.779651] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1681.779832] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Deleting the datastore file [datastore2] 7a5c2a57-b28d-45e0-ab7b-5a649758b69b {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1681.780117] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d65cf205-3e1d-4501-97d1-1f5256a05bd2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.785992] env[67015]: DEBUG oslo_vmware.api [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Waiting for the task: (returnval){ [ 1681.785992] env[67015]: value = "task-3114542" [ 1681.785992] env[67015]: _type = "Task" [ 1681.785992] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.793511] env[67015]: DEBUG oslo_vmware.api [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Task: {'id': task-3114542, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.234180] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1682.234448] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Creating directory with path [datastore2] vmware_temp/964c0d52-6b4d-4e66-b051-b45e3e541c31/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1682.234710] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d7f1cc81-d5da-483e-b9c5-7599139044b0 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.247398] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Created directory with path [datastore2] vmware_temp/964c0d52-6b4d-4e66-b051-b45e3e541c31/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1682.247676] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Fetch image to [datastore2] vmware_temp/964c0d52-6b4d-4e66-b051-b45e3e541c31/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1682.247920] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/964c0d52-6b4d-4e66-b051-b45e3e541c31/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1682.249076] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c621d2-c1ad-4352-b6bb-d55c4d00e252 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.256790] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cda0654-ceec-4044-bf32-559c20a07b46 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.266822] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79634838-65d2-4b93-b03f-2b073c629669 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.300339] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06ea36bc-6c23-4769-9634-4cdc2e5fa822 {{(pid=67015) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.309069] env[67015]: DEBUG oslo_vmware.api [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Task: {'id': task-3114542, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074131} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.309626] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1682.309828] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1682.310012] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1682.310199] env[67015]: INFO nova.compute.manager [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1682.311821] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-11ac0398-f66c-4099-b5f0-71946092460f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.313827] env[67015]: DEBUG nova.compute.claims [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1682.314008] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.314234] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.335682] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1682.395320] env[67015]: DEBUG oslo_vmware.rw_handles [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/964c0d52-6b4d-4e66-b051-b45e3e541c31/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1682.456131] env[67015]: DEBUG oslo_vmware.rw_handles [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1682.456319] env[67015]: DEBUG oslo_vmware.rw_handles [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/964c0d52-6b4d-4e66-b051-b45e3e541c31/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1682.514643] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1682.514815] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1682.514938] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1682.538717] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1682.538884] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1682.539029] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1682.539165] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1682.539291] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1682.539416] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1682.539536] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1682.539655] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Skipping network cache update for instance because it is Building. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1682.539773] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1682.539890] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1682.575132] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abeeae10-fbbe-4a52-84ac-413bde0590ce {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.582615] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4781c0d4-ab45-4f64-a932-7e89fe191b5a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.611418] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef929dd-1812-4009-9677-fa76f9dbc161 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.618581] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-590e594c-e974-433e-bc73-1a946d664536 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.631047] env[67015]: DEBUG nova.compute.provider_tree [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1682.638790] env[67015]: DEBUG nova.scheduler.client.report [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1682.652226] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.338s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.652802] env[67015]: ERROR nova.compute.manager [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 
tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1682.652802] env[67015]: Faults: ['InvalidArgument'] [ 1682.652802] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Traceback (most recent call last): [ 1682.652802] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1682.652802] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] self.driver.spawn(context, instance, image_meta, [ 1682.652802] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1682.652802] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1682.652802] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1682.652802] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] self._fetch_image_if_missing(context, vi) [ 1682.652802] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1682.652802] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] image_cache(vi, tmp_image_ds_loc) [ 1682.652802] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1682.653145] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] vm_util.copy_virtual_disk( [ 1682.653145] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1682.653145] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] session._wait_for_task(vmdk_copy_task) [ 1682.653145] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1682.653145] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] return self.wait_for_task(task_ref) [ 1682.653145] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1682.653145] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] return evt.wait() [ 1682.653145] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1682.653145] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] result = hub.switch() [ 1682.653145] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] File
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1682.653145] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] return self.greenlet.switch() [ 1682.653145] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1682.653145] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] self.f(*self.args, **self.kw) [ 1682.653462] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1682.653462] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] raise exceptions.translate_fault(task_info.error) [ 1682.653462] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1682.653462] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Faults: ['InvalidArgument'] [ 1682.653462] env[67015]: ERROR nova.compute.manager [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] [ 1682.653594] env[67015]: DEBUG nova.compute.utils [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1682.656013] env[67015]: DEBUG nova.compute.manager [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Build of instance 7a5c2a57-b28d-45e0-ab7b-5a649758b69b was re-scheduled: A specified parameter was not correct: fileType [ 1682.656013] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1682.656394] env[67015]: DEBUG nova.compute.manager [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1682.656571] env[67015]: DEBUG nova.compute.manager [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1682.656742] env[67015]: DEBUG nova.compute.manager [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1682.656907] env[67015]: DEBUG nova.network.neutron [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1683.054912] env[67015]: DEBUG nova.network.neutron [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1683.069448] env[67015]: INFO nova.compute.manager [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Took 0.41 seconds to deallocate network for instance. [ 1683.166435] env[67015]: INFO nova.scheduler.client.report [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Deleted allocations for instance 7a5c2a57-b28d-45e0-ab7b-5a649758b69b [ 1683.189540] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ff5edfe5-45f3-4245-a7ed-da57d17e8418 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Lock "7a5c2a57-b28d-45e0-ab7b-5a649758b69b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 603.499s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1683.190524] env[67015]: DEBUG oslo_concurrency.lockutils [None req-02260bb6-0104-4b8a-af60-61e7b1c46819 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Lock "7a5c2a57-b28d-45e0-ab7b-5a649758b69b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 408.098s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1683.190850] env[67015]: DEBUG oslo_concurrency.lockutils [None req-02260bb6-0104-4b8a-af60-61e7b1c46819 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Acquiring lock "7a5c2a57-b28d-45e0-ab7b-5a649758b69b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1683.190975] env[67015]: DEBUG oslo_concurrency.lockutils [None req-02260bb6-0104-4b8a-af60-61e7b1c46819 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Lock 
"7a5c2a57-b28d-45e0-ab7b-5a649758b69b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1683.191162] env[67015]: DEBUG oslo_concurrency.lockutils [None req-02260bb6-0104-4b8a-af60-61e7b1c46819 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Lock "7a5c2a57-b28d-45e0-ab7b-5a649758b69b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1683.193467] env[67015]: INFO nova.compute.manager [None req-02260bb6-0104-4b8a-af60-61e7b1c46819 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Terminating instance [ 1683.195179] env[67015]: DEBUG nova.compute.manager [None req-02260bb6-0104-4b8a-af60-61e7b1c46819 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1683.195374] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-02260bb6-0104-4b8a-af60-61e7b1c46819 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1683.195871] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-90ec1bdb-9977-4324-bdc5-89416ddb4a9a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.201067] env[67015]: DEBUG nova.compute.manager [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1683.208519] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf8b4e3-f305-4b3d-9235-c4bfe681a50e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.237616] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-02260bb6-0104-4b8a-af60-61e7b1c46819 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7a5c2a57-b28d-45e0-ab7b-5a649758b69b could not be found. 
[ 1683.237832] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-02260bb6-0104-4b8a-af60-61e7b1c46819 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1683.238017] env[67015]: INFO nova.compute.manager [None req-02260bb6-0104-4b8a-af60-61e7b1c46819 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1683.238274] env[67015]: DEBUG oslo.service.loopingcall [None req-02260bb6-0104-4b8a-af60-61e7b1c46819 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1683.238498] env[67015]: DEBUG nova.compute.manager [-] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1683.238594] env[67015]: DEBUG nova.network.neutron [-] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1683.254834] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1683.255110] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1683.256586] env[67015]: INFO nova.compute.claims [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1683.268139] env[67015]: DEBUG nova.network.neutron [-] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1683.282277] env[67015]: INFO nova.compute.manager [-] [instance: 7a5c2a57-b28d-45e0-ab7b-5a649758b69b] Took 0.04 seconds to deallocate network for instance. 
[ 1683.373161] env[67015]: DEBUG oslo_concurrency.lockutils [None req-02260bb6-0104-4b8a-af60-61e7b1c46819 tempest-ServersV294TestFqdnHostnames-1425156443 tempest-ServersV294TestFqdnHostnames-1425156443-project-member] Lock "7a5c2a57-b28d-45e0-ab7b-5a649758b69b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.183s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1683.436483] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-791da14d-a321-4c2c-a767-5b2ba6175e66 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.443896] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4347bc7c-dea0-470b-8f41-7a09fed9e9a6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.474873] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca319b0-3cb7-468b-be71-642d9b7ade5f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.481842] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-557078e7-2a20-4526-8567-c52e1655756e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.494611] env[67015]: DEBUG nova.compute.provider_tree [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1683.503324] env[67015]: DEBUG nova.scheduler.client.report [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1683.519077] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.264s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1683.519527] env[67015]: DEBUG nova.compute.manager [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Start building networks asynchronously for instance. 
{{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1683.552187] env[67015]: DEBUG nova.compute.utils [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1683.553612] env[67015]: DEBUG nova.compute.manager [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1683.553781] env[67015]: DEBUG nova.network.neutron [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1683.562190] env[67015]: DEBUG nova.compute.manager [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1683.610043] env[67015]: DEBUG nova.policy [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c296928b2fac410abe4cf22099518f74', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '65b8edd6f91443aa8051a18bbd34cc29', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 1683.624413] env[67015]: DEBUG nova.compute.manager [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Start spawning the instance on the hypervisor. 
{{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1683.650692] env[67015]: DEBUG nova.virt.hardware [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1683.651081] env[67015]: DEBUG nova.virt.hardware [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1683.651410] env[67015]: DEBUG nova.virt.hardware [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1683.651707] env[67015]: DEBUG nova.virt.hardware [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1683.651937] env[67015]: DEBUG nova.virt.hardware [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1683.652181] env[67015]: DEBUG nova.virt.hardware [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1683.652499] env[67015]: DEBUG nova.virt.hardware [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1683.652742] env[67015]: DEBUG nova.virt.hardware [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1683.652996] env[67015]: DEBUG nova.virt.hardware [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 
tempest-ServersTestJSON-767920299-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1683.653276] env[67015]: DEBUG nova.virt.hardware [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1683.653547] env[67015]: DEBUG nova.virt.hardware [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1683.654749] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dfab41f-0be9-40e3-bce0-2286c09b0750 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.665069] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c167bc4-f61e-4a95-988c-7aa170eaa0dc {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.927876] env[67015]: DEBUG nova.network.neutron [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Successfully created port: 96fbe5b9-84ce-479b-b490-c7165fb38767 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1684.490022] env[67015]: DEBUG nova.network.neutron [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Successfully updated port: 96fbe5b9-84ce-479b-b490-c7165fb38767 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1684.504967] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "refresh_cache-4d61f178-b532-4ddb-958f-68723d041497" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1684.505149] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquired lock "refresh_cache-4d61f178-b532-4ddb-958f-68723d041497" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1684.505313] env[67015]: DEBUG nova.network.neutron [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1684.547631] env[67015]: DEBUG nova.network.neutron [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Instance cache missing network info. 
{{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1684.703755] env[67015]: DEBUG nova.network.neutron [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Updating instance_info_cache with network_info: [{"id": "96fbe5b9-84ce-479b-b490-c7165fb38767", "address": "fa:16:3e:cc:b8:2e", "network": {"id": "9688d542-250f-4036-8015-7464f44aa4d6", "bridge": "br-int", "label": "tempest-ServersTestJSON-439582917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65b8edd6f91443aa8051a18bbd34cc29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96fbe5b9-84", "ovs_interfaceid": "96fbe5b9-84ce-479b-b490-c7165fb38767", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1684.714480] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Releasing lock "refresh_cache-4d61f178-b532-4ddb-958f-68723d041497" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1684.714747] env[67015]: DEBUG nova.compute.manager [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Instance network_info: |[{"id": "96fbe5b9-84ce-479b-b490-c7165fb38767", "address": "fa:16:3e:cc:b8:2e", "network": {"id": "9688d542-250f-4036-8015-7464f44aa4d6", "bridge": "br-int", "label": "tempest-ServersTestJSON-439582917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65b8edd6f91443aa8051a18bbd34cc29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96fbe5b9-84", "ovs_interfaceid": "96fbe5b9-84ce-479b-b490-c7165fb38767", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1684.715172] env[67015]: DEBUG 
nova.virt.vmwareapi.vmops [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:b8:2e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb0e556a-0f69-4a5c-af62-ffc46edb8e63', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '96fbe5b9-84ce-479b-b490-c7165fb38767', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1684.722503] env[67015]: DEBUG oslo.service.loopingcall [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1684.722944] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1684.723181] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-17b0a981-cec9-4fdd-a836-e6bedbd25dcb {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.743540] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1684.743540] env[67015]: value = "task-3114543" [ 1684.743540] env[67015]: _type = "Task" [ 1684.743540] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.752032] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114543, 'name': CreateVM_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.182895] env[67015]: DEBUG nova.compute.manager [req-cf1a2336-ea18-472c-92d8-3e44f6787ba3 req-a608d6b1-7465-4248-95fe-891b997ea245 service nova] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Received event network-vif-plugged-96fbe5b9-84ce-479b-b490-c7165fb38767 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1685.182895] env[67015]: DEBUG oslo_concurrency.lockutils [req-cf1a2336-ea18-472c-92d8-3e44f6787ba3 req-a608d6b1-7465-4248-95fe-891b997ea245 service nova] Acquiring lock "4d61f178-b532-4ddb-958f-68723d041497-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.182895] env[67015]: DEBUG oslo_concurrency.lockutils [req-cf1a2336-ea18-472c-92d8-3e44f6787ba3 req-a608d6b1-7465-4248-95fe-891b997ea245 service nova] Lock "4d61f178-b532-4ddb-958f-68723d041497-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1685.183090] env[67015]: DEBUG oslo_concurrency.lockutils [req-cf1a2336-ea18-472c-92d8-3e44f6787ba3 req-a608d6b1-7465-4248-95fe-891b997ea245 service nova] Lock "4d61f178-b532-4ddb-958f-68723d041497-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1685.183238] env[67015]: DEBUG nova.compute.manager [req-cf1a2336-ea18-472c-92d8-3e44f6787ba3 req-a608d6b1-7465-4248-95fe-891b997ea245 service nova] [instance: 4d61f178-b532-4ddb-958f-68723d041497] No waiting events found dispatching network-vif-plugged-96fbe5b9-84ce-479b-b490-c7165fb38767 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1685.183445] env[67015]: WARNING nova.compute.manager [req-cf1a2336-ea18-472c-92d8-3e44f6787ba3 req-a608d6b1-7465-4248-95fe-891b997ea245 service nova] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Received unexpected event network-vif-plugged-96fbe5b9-84ce-479b-b490-c7165fb38767 for instance with vm_state building and task_state spawning. [ 1685.183552] env[67015]: DEBUG nova.compute.manager [req-cf1a2336-ea18-472c-92d8-3e44f6787ba3 req-a608d6b1-7465-4248-95fe-891b997ea245 service nova] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Received event network-changed-96fbe5b9-84ce-479b-b490-c7165fb38767 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1685.183701] env[67015]: DEBUG nova.compute.manager [req-cf1a2336-ea18-472c-92d8-3e44f6787ba3 req-a608d6b1-7465-4248-95fe-891b997ea245 service nova] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Refreshing instance network info cache due to event network-changed-96fbe5b9-84ce-479b-b490-c7165fb38767. 
{{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1685.183871] env[67015]: DEBUG oslo_concurrency.lockutils [req-cf1a2336-ea18-472c-92d8-3e44f6787ba3 req-a608d6b1-7465-4248-95fe-891b997ea245 service nova] Acquiring lock "refresh_cache-4d61f178-b532-4ddb-958f-68723d041497" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1685.184011] env[67015]: DEBUG oslo_concurrency.lockutils [req-cf1a2336-ea18-472c-92d8-3e44f6787ba3 req-a608d6b1-7465-4248-95fe-891b997ea245 service nova] Acquired lock "refresh_cache-4d61f178-b532-4ddb-958f-68723d041497" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1685.184168] env[67015]: DEBUG nova.network.neutron [req-cf1a2336-ea18-472c-92d8-3e44f6787ba3 req-a608d6b1-7465-4248-95fe-891b997ea245 service nova] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Refreshing network info cache for port 96fbe5b9-84ce-479b-b490-c7165fb38767 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1685.253869] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114543, 'name': CreateVM_Task, 'duration_secs': 0.29842} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.254059] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1685.254727] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1685.254891] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1685.255265] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1685.255467] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43ae69cc-c9db-41e7-a2a8-7050a765501d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.260032] env[67015]: DEBUG oslo_vmware.api [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Waiting for the task: (returnval){ [ 1685.260032] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]5254c7ae-1249-d198-8489-183ce5443c78" [ 1685.260032] env[67015]: _type = "Task" [ 1685.260032] env[67015]: } to complete. 
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.267830] env[67015]: DEBUG oslo_vmware.api [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]5254c7ae-1249-d198-8489-183ce5443c78, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.432250] env[67015]: DEBUG nova.network.neutron [req-cf1a2336-ea18-472c-92d8-3e44f6787ba3 req-a608d6b1-7465-4248-95fe-891b997ea245 service nova] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Updated VIF entry in instance network info cache for port 96fbe5b9-84ce-479b-b490-c7165fb38767. {{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1685.432648] env[67015]: DEBUG nova.network.neutron [req-cf1a2336-ea18-472c-92d8-3e44f6787ba3 req-a608d6b1-7465-4248-95fe-891b997ea245 service nova] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Updating instance_info_cache with network_info: [{"id": "96fbe5b9-84ce-479b-b490-c7165fb38767", "address": "fa:16:3e:cc:b8:2e", "network": {"id": "9688d542-250f-4036-8015-7464f44aa4d6", "bridge": "br-int", "label": "tempest-ServersTestJSON-439582917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65b8edd6f91443aa8051a18bbd34cc29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96fbe5b9-84", "ovs_interfaceid": "96fbe5b9-84ce-479b-b490-c7165fb38767", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1685.441624] env[67015]: DEBUG oslo_concurrency.lockutils [req-cf1a2336-ea18-472c-92d8-3e44f6787ba3 req-a608d6b1-7465-4248-95fe-891b997ea245 service nova] Releasing lock "refresh_cache-4d61f178-b532-4ddb-958f-68723d041497" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1685.770362] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1685.770793] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1685.770852] env[67015]: 
DEBUG oslo_concurrency.lockutils [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1704.594290] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Acquiring lock "c1049b71-7c9b-4772-a889-fee93a62cf05" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1704.594664] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Lock "c1049b71-7c9b-4772-a889-fee93a62cf05" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1728.545921] env[67015]: WARNING oslo_vmware.rw_handles [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1728.545921] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1728.545921] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1728.545921] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1728.545921] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1728.545921] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 1728.545921] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1728.545921] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1728.545921] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1728.545921] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1728.545921] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1728.545921] env[67015]: ERROR oslo_vmware.rw_handles [ 1728.546698] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/964c0d52-6b4d-4e66-b051-b45e3e541c31/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1728.548423] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: 
c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1728.548659] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Copying Virtual Disk [datastore2] vmware_temp/964c0d52-6b4d-4e66-b051-b45e3e541c31/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/964c0d52-6b4d-4e66-b051-b45e3e541c31/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1728.548956] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-00848614-ecaf-40be-9a05-2811daae4f58 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.556904] env[67015]: DEBUG oslo_vmware.api [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Waiting for the task: (returnval){ [ 1728.556904] env[67015]: value = "task-3114544" [ 1728.556904] env[67015]: _type = "Task" [ 1728.556904] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.564938] env[67015]: DEBUG oslo_vmware.api [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Task: {'id': task-3114544, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.068897] env[67015]: DEBUG oslo_vmware.exceptions [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Fault InvalidArgument not matched. 
{{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1729.069198] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1729.069719] env[67015]: ERROR nova.compute.manager [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1729.069719] env[67015]: Faults: ['InvalidArgument'] [ 1729.069719] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Traceback (most recent call last): [ 1729.069719] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1729.069719] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] yield resources [ 1729.069719] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1729.069719] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] self.driver.spawn(context, instance, image_meta, [ 1729.069719] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1729.069719] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1729.069719] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1729.069719] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] self._fetch_image_if_missing(context, vi) [ 1729.069719] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1729.069719] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] image_cache(vi, tmp_image_ds_loc) [ 1729.070170] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1729.070170] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] vm_util.copy_virtual_disk( [ 1729.070170] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1729.070170] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] session._wait_for_task(vmdk_copy_task) [ 1729.070170] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task 
[ 1729.070170] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] return self.wait_for_task(task_ref) [ 1729.070170] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1729.070170] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] return evt.wait() [ 1729.070170] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1729.070170] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] result = hub.switch() [ 1729.070170] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1729.070170] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] return self.greenlet.switch() [ 1729.070170] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1729.070620] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] self.f(*self.args, **self.kw) [ 1729.070620] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1729.070620] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] raise exceptions.translate_fault(task_info.error) [ 1729.070620] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1729.070620] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Faults: ['InvalidArgument'] [ 1729.070620] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] [ 1729.070620] env[67015]: INFO nova.compute.manager [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Terminating instance [ 1729.071621] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1729.071866] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1729.072121] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c0fc79e-0c06-4710-b276-f037c4232591 {{(pid=67015) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.075615] env[67015]: DEBUG nova.compute.manager [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1729.075814] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1729.076530] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17506a7b-5bcf-49f3-a8a1-dbf7dadb0e4a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.080136] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1729.080323] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1729.081291] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-130f29eb-c9bc-4041-b429-5f75dca04a4f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.084937] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1729.085450] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4e2c06c1-3c7d-4f16-a64f-cfbf041b67ef {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.087651] env[67015]: DEBUG oslo_vmware.api [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Waiting for the task: (returnval){ [ 1729.087651] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]5237fb09-1e72-c54c-580c-d76b05ef2be4" [ 1729.087651] env[67015]: _type = "Task" [ 1729.087651] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.094774] env[67015]: DEBUG oslo_vmware.api [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]5237fb09-1e72-c54c-580c-d76b05ef2be4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.153050] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1729.153294] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1729.153477] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Deleting the datastore file [datastore2] c7d8b42d-6455-4489-9f62-8ab9f85e7f76 {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1729.153742] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-21d446e6-ad3c-42dc-bd86-3ef1ec1eec4e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.160250] env[67015]: DEBUG oslo_vmware.api [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Waiting for the task: (returnval){ [ 1729.160250] env[67015]: value = "task-3114546" [ 1729.160250] env[67015]: _type = "Task" [ 1729.160250] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.167455] env[67015]: DEBUG oslo_vmware.api [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Task: {'id': task-3114546, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.598170] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1729.598500] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Creating directory with path [datastore2] vmware_temp/f819b87d-5743-47b3-9dc6-be7fa4fdd65c/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1729.598664] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4dbeeaa2-c519-4a39-a080-e058b4bac7b7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.609499] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Created directory with path [datastore2] vmware_temp/f819b87d-5743-47b3-9dc6-be7fa4fdd65c/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1729.609684] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Fetch image to [datastore2] vmware_temp/f819b87d-5743-47b3-9dc6-be7fa4fdd65c/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1729.609856] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/f819b87d-5743-47b3-9dc6-be7fa4fdd65c/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1729.610584] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd0263a-7466-4129-97fa-80a7b9d07dd1 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.616719] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed09faa8-d3b3-4b37-801e-15f6e3ab64ce {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.625411] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd0f9bfa-1f07-4be6-a6a3-eba8ec659885 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.654883] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8e43997e-6edc-4350-8f13-251b4808caea {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.663085] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f60a78c7-8af1-4483-8f6f-2e65e60958e3 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.668984] env[67015]: DEBUG oslo_vmware.api [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Task: {'id': task-3114546, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073709} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.669238] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1729.669419] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1729.669590] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1729.669762] env[67015]: INFO nova.compute.manager [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Took 0.59 seconds to destroy the instance on the hypervisor. 
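The CopyVirtualDisk_Task failure recorded above surfaces through oslo.vmware's task polling: wait_for_task() (api.py:397 in the log) re-reads the vCenter TaskInfo on an interval, _poll_task (api.py:434) reports progress while the task is running, and once the task reaches the error state the server-side fault is translated and raised, which is where the "A specified parameter was not correct: fileType / Faults: ['InvalidArgument']" in the traceback comes from. A minimal, self-contained sketch of that poll-and-raise pattern follows; it is plain Python, and the TaskInfo dicts and VimFault class are illustrative stand-ins, not the actual oslo.vmware implementation:

    import time

    class VimFault(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, msg, fault_list):
            super().__init__(msg)
            self.fault_list = fault_list

    def wait_for_task(read_task_info, interval=0.5):
        """Poll task info until it leaves the queued/running states."""
        while True:
            info = read_task_info()  # one property-collector round-trip
            if info['state'] in ('queued', 'running'):
                time.sleep(interval)  # oslo.vmware drives this with a loopingcall
                continue
            if info['state'] == 'success':
                return info.get('result')
            # 'error' state: raise the translated fault, as _poll_task does
            raise VimFault(info['error']['message'], info['error']['faults'])

    # A task that fails the way task-3114544 (CopyVirtualDisk_Task) does above.
    states = iter([
        {'state': 'running'},
        {'state': 'error',
         'error': {'message': 'A specified parameter was not correct: fileType',
                   'faults': ['InvalidArgument']}},
    ])
    try:
        wait_for_task(lambda: next(states), interval=0)
    except VimFault as exc:
        print(exc, exc.fault_list)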
[ 1729.671964] env[67015]: DEBUG nova.compute.claims [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1729.672157] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.672393] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1729.690470] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1729.748384] env[67015]: DEBUG oslo_vmware.rw_handles [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f819b87d-5743-47b3-9dc6-be7fa4fdd65c/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1729.816291] env[67015]: DEBUG oslo_vmware.rw_handles [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1729.816536] env[67015]: DEBUG oslo_vmware.rw_handles [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f819b87d-5743-47b3-9dc6-be7fa4fdd65c/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1729.933517] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-478b1c2d-9b5e-48be-a795-f051ed92bece {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.941197] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5909e925-e8ec-4af0-9984-06c8f1ae6733 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.970376] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad26cf66-5771-4f35-b68e-3b43bea5f8e4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.976965] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a9074c-e27e-46b5-96bf-ee16f44c9f64 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.989570] env[67015]: DEBUG nova.compute.provider_tree [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1729.997563] env[67015]: DEBUG nova.scheduler.client.report [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1730.011412] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.339s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1730.011950] env[67015]: ERROR nova.compute.manager [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1730.011950] env[67015]: Faults: ['InvalidArgument'] [ 1730.011950] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Traceback (most recent call last): [ 1730.011950] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1730.011950] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] 
self.driver.spawn(context, instance, image_meta, [ 1730.011950] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1730.011950] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1730.011950] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1730.011950] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] self._fetch_image_if_missing(context, vi) [ 1730.011950] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1730.011950] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] image_cache(vi, tmp_image_ds_loc) [ 1730.011950] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1730.012304] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] vm_util.copy_virtual_disk( [ 1730.012304] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1730.012304] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] session._wait_for_task(vmdk_copy_task) [ 1730.012304] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1730.012304] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] return self.wait_for_task(task_ref) [ 1730.012304] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1730.012304] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] return evt.wait() [ 1730.012304] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1730.012304] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] result = hub.switch() [ 1730.012304] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1730.012304] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] return self.greenlet.switch() [ 1730.012304] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1730.012304] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] self.f(*self.args, **self.kw) [ 1730.012618] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task 
[ 1730.012618] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] raise exceptions.translate_fault(task_info.error) [ 1730.012618] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1730.012618] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Faults: ['InvalidArgument'] [ 1730.012618] env[67015]: ERROR nova.compute.manager [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] [ 1730.012747] env[67015]: DEBUG nova.compute.utils [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1730.014123] env[67015]: DEBUG nova.compute.manager [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Build of instance c7d8b42d-6455-4489-9f62-8ab9f85e7f76 was re-scheduled: A specified parameter was not correct: fileType [ 1730.014123] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1730.014511] env[67015]: DEBUG nova.compute.manager [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1730.014700] env[67015]: DEBUG nova.compute.manager [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1730.014865] env[67015]: DEBUG nova.compute.manager [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1730.015039] env[67015]: DEBUG nova.network.neutron [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1730.448829] env[67015]: DEBUG nova.network.neutron [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1730.459011] env[67015]: INFO nova.compute.manager [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Took 0.44 seconds to deallocate network for instance. 
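The "Acquiring lock ... / acquired by ... :: waited / released ... :: held" triplets throughout this log are emitted by oslo.concurrency's lockutils: build, terminate and event handling each run inside a named lock (the instance UUID, "compute_resources", "<uuid>-events"), which is what serializes the long-held build lock released below after 567.921s against the terminate request that was waiting on it. A small usage sketch, assuming only that the oslo.concurrency package is installed; the lock name and function body are illustrative, not Nova's code:

    from oslo_concurrency import lockutils

    # Callers of any function decorated with the same lock name serialize;
    # lockutils records the "waited N.NNNs" / "held N.NNNs" bookkeeping seen here.
    @lockutils.synchronized('c1049b71-7c9b-4772-a889-fee93a62cf05')
    def _locked_do_build_and_run_instance(instance_uuid):
        # Real code would claim resources and call driver.spawn(); this
        # stand-in only demonstrates the locking pattern.
        return 'built %s' % instance_uuid

    print(_locked_do_build_and_run_instance('c1049b71-7c9b-4772-a889-fee93a62cf05'))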
[ 1730.562611] env[67015]: INFO nova.scheduler.client.report [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Deleted allocations for instance c7d8b42d-6455-4489-9f62-8ab9f85e7f76 [ 1730.583664] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f0267ee-542f-445f-bd88-2e7698538eba tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "c7d8b42d-6455-4489-9f62-8ab9f85e7f76" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 567.921s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1730.584871] env[67015]: DEBUG oslo_concurrency.lockutils [None req-96d57344-1051-4a13-9b61-0c859f222015 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "c7d8b42d-6455-4489-9f62-8ab9f85e7f76" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 371.633s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1730.585149] env[67015]: DEBUG oslo_concurrency.lockutils [None req-96d57344-1051-4a13-9b61-0c859f222015 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquiring lock "c7d8b42d-6455-4489-9f62-8ab9f85e7f76-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1730.585394] env[67015]: DEBUG oslo_concurrency.lockutils [None req-96d57344-1051-4a13-9b61-0c859f222015 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "c7d8b42d-6455-4489-9f62-8ab9f85e7f76-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1730.585586] env[67015]: DEBUG oslo_concurrency.lockutils [None req-96d57344-1051-4a13-9b61-0c859f222015 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "c7d8b42d-6455-4489-9f62-8ab9f85e7f76-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1730.587544] env[67015]: INFO nova.compute.manager [None req-96d57344-1051-4a13-9b61-0c859f222015 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Terminating instance [ 1730.589192] env[67015]: DEBUG nova.compute.manager [None req-96d57344-1051-4a13-9b61-0c859f222015 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Start destroying the instance on the hypervisor. 
{{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1730.589439] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-96d57344-1051-4a13-9b61-0c859f222015 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1730.589938] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1e093d7c-cabe-4e3e-896a-bd4dca5c5be8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.593976] env[67015]: DEBUG nova.compute.manager [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1730.602823] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-520b6dc4-9086-432f-b9bf-6790c651ca8e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.630715] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-96d57344-1051-4a13-9b61-0c859f222015 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c7d8b42d-6455-4489-9f62-8ab9f85e7f76 could not be found. [ 1730.630934] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-96d57344-1051-4a13-9b61-0c859f222015 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1730.631132] env[67015]: INFO nova.compute.manager [None req-96d57344-1051-4a13-9b61-0c859f222015 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1730.631375] env[67015]: DEBUG oslo.service.loopingcall [None req-96d57344-1051-4a13-9b61-0c859f222015 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1730.632179] env[67015]: DEBUG nova.compute.manager [-] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1730.632281] env[67015]: DEBUG nova.network.neutron [-] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1730.647871] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1730.648139] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1730.649578] env[67015]: INFO nova.compute.claims [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1730.658996] env[67015]: DEBUG nova.network.neutron [-] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1730.675619] env[67015]: INFO nova.compute.manager [-] [instance: c7d8b42d-6455-4489-9f62-8ab9f85e7f76] Took 0.04 seconds to deallocate network for instance. 
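The scheduler report lines (at 1729.997563 above and again just below at 1730.904489) carry the placement inventory for provider 82311841-8ff3-4f49-9053-67c5a45ef771. Placement derives usable capacity per resource class as (total - reserved) * allocation_ratio, so this node advertises 192 VCPU, 196078 MB of RAM and 400 GB of disk to the scheduler. A quick check in plain Python, with the values copied from the report; the capacity formula is placement's documented behaviour, restated here as an assumption rather than taken from this log:

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, int(capacity))
    # VCPU 192, MEMORY_MB 196078, DISK_GB 400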
[ 1730.760267] env[67015]: DEBUG oslo_concurrency.lockutils [None req-96d57344-1051-4a13-9b61-0c859f222015 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "c7d8b42d-6455-4489-9f62-8ab9f85e7f76" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.175s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1730.835216] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64620b61-c1a6-4d44-9036-0a8810986838 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.843204] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eaeb367-4861-4764-9769-c62f08d9edac {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.874053] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-450c4055-a88b-47c0-ba51-999e630f4ed0 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.881462] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9e4355-35ec-4e03-8b00-b28c2edf8cdd {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.894864] env[67015]: DEBUG nova.compute.provider_tree [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1730.904489] env[67015]: DEBUG nova.scheduler.client.report [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1730.918464] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.270s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1730.918935] env[67015]: DEBUG nova.compute.manager [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Start building networks asynchronously for instance. 
{{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1730.956532] env[67015]: DEBUG nova.compute.utils [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1730.958727] env[67015]: DEBUG nova.compute.manager [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1730.959545] env[67015]: DEBUG nova.network.neutron [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1730.966552] env[67015]: DEBUG nova.compute.manager [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1731.021208] env[67015]: DEBUG nova.policy [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1e358776890d43cab7e5e4a2bf5423c1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'caadf2a2334b4336a4bf7d1f0462adfe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 1731.027618] env[67015]: DEBUG nova.compute.manager [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Start spawning the instance on the hypervisor. 
{{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1731.054112] env[67015]: DEBUG nova.virt.hardware [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1731.054376] env[67015]: DEBUG nova.virt.hardware [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1731.054534] env[67015]: DEBUG nova.virt.hardware [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1731.054715] env[67015]: DEBUG nova.virt.hardware [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1731.054864] env[67015]: DEBUG nova.virt.hardware [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1731.055024] env[67015]: DEBUG nova.virt.hardware [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1731.055249] env[67015]: DEBUG nova.virt.hardware [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1731.055410] env[67015]: DEBUG nova.virt.hardware [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1731.055577] env[67015]: DEBUG nova.virt.hardware [None 
req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1731.055742] env[67015]: DEBUG nova.virt.hardware [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1731.055917] env[67015]: DEBUG nova.virt.hardware [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1731.056800] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ae172a-9b73-48e8-a3c5-669141771703 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.065191] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-533d8b05-de77-4fe8-94bd-1cfc10975a87 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.314897] env[67015]: DEBUG nova.network.neutron [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Successfully created port: ad8b99e4-e1f5-40d9-8241-3aa31a82d35e {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1731.877316] env[67015]: DEBUG nova.network.neutron [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Successfully updated port: ad8b99e4-e1f5-40d9-8241-3aa31a82d35e {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1731.889937] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Acquiring lock "refresh_cache-6daf9c76-9471-43ec-9dd6-aaa43efc391b" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1731.890035] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Acquired lock "refresh_cache-6daf9c76-9471-43ec-9dd6-aaa43efc391b" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1731.890199] env[67015]: DEBUG nova.network.neutron [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1731.932291] env[67015]: DEBUG nova.network.neutron [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 
tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1732.102587] env[67015]: DEBUG nova.network.neutron [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Updating instance_info_cache with network_info: [{"id": "ad8b99e4-e1f5-40d9-8241-3aa31a82d35e", "address": "fa:16:3e:e7:b5:e8", "network": {"id": "c2a2b5c1-97e4-4050-b354-fdaed8c9f0c9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1005456600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "caadf2a2334b4336a4bf7d1f0462adfe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad8b99e4-e1", "ovs_interfaceid": "ad8b99e4-e1f5-40d9-8241-3aa31a82d35e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1732.115089] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Releasing lock "refresh_cache-6daf9c76-9471-43ec-9dd6-aaa43efc391b" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1732.115531] env[67015]: DEBUG nova.compute.manager [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Instance network_info: |[{"id": "ad8b99e4-e1f5-40d9-8241-3aa31a82d35e", "address": "fa:16:3e:e7:b5:e8", "network": {"id": "c2a2b5c1-97e4-4050-b354-fdaed8c9f0c9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1005456600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "caadf2a2334b4336a4bf7d1f0462adfe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad8b99e4-e1", "ovs_interfaceid": "ad8b99e4-e1f5-40d9-8241-3aa31a82d35e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 1732.115794] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e7:b5:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dd7d0d95-6848-4e69-ac21-75f8db82a3b5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ad8b99e4-e1f5-40d9-8241-3aa31a82d35e', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1732.123819] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Creating folder: Project (caadf2a2334b4336a4bf7d1f0462adfe). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1732.124249] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-26d3c955-d42e-4e49-bb63-b2a3b2405f1a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1732.134920] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Created folder: Project (caadf2a2334b4336a4bf7d1f0462adfe) in parent group-v623108.
[ 1732.135115] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Creating folder: Instances. Parent ref: group-v623212. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1732.135336] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-66091805-f8f9-4787-863c-c456ee536592 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1732.143434] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Created folder: Instances in parent group-v623212.
[ 1732.143652] env[67015]: DEBUG oslo.service.loopingcall [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1732.143829] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1732.144018] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d947363f-988a-4a4e-bc33-5d09a188afc2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1732.162598] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1732.162598] env[67015]: value = "task-3114549"
[ 1732.162598] env[67015]: _type = "Task"
[ 1732.162598] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1732.169826] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114549, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1732.514418] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1732.618755] env[67015]: DEBUG nova.compute.manager [req-9e2bb2b9-2e8f-4cc0-b051-03fd39d2028a req-64967803-b1dc-4069-a155-3160d71d246a service nova] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Received event network-vif-plugged-ad8b99e4-e1f5-40d9-8241-3aa31a82d35e {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1732.619154] env[67015]: DEBUG oslo_concurrency.lockutils [req-9e2bb2b9-2e8f-4cc0-b051-03fd39d2028a req-64967803-b1dc-4069-a155-3160d71d246a service nova] Acquiring lock "6daf9c76-9471-43ec-9dd6-aaa43efc391b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1732.619499] env[67015]: DEBUG oslo_concurrency.lockutils [req-9e2bb2b9-2e8f-4cc0-b051-03fd39d2028a req-64967803-b1dc-4069-a155-3160d71d246a service nova] Lock "6daf9c76-9471-43ec-9dd6-aaa43efc391b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1732.622016] env[67015]: DEBUG oslo_concurrency.lockutils [req-9e2bb2b9-2e8f-4cc0-b051-03fd39d2028a req-64967803-b1dc-4069-a155-3160d71d246a service nova] Lock "6daf9c76-9471-43ec-9dd6-aaa43efc391b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1732.622016] env[67015]: DEBUG nova.compute.manager [req-9e2bb2b9-2e8f-4cc0-b051-03fd39d2028a req-64967803-b1dc-4069-a155-3160d71d246a service nova] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] No waiting events found dispatching network-vif-plugged-ad8b99e4-e1f5-40d9-8241-3aa31a82d35e {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1732.622016] env[67015]: WARNING nova.compute.manager [req-9e2bb2b9-2e8f-4cc0-b051-03fd39d2028a req-64967803-b1dc-4069-a155-3160d71d246a service nova] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Received unexpected event network-vif-plugged-ad8b99e4-e1f5-40d9-8241-3aa31a82d35e for instance with vm_state building and task_state spawning.
[ 1732.622016] env[67015]: DEBUG nova.compute.manager [req-9e2bb2b9-2e8f-4cc0-b051-03fd39d2028a req-64967803-b1dc-4069-a155-3160d71d246a service nova] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Received event network-changed-ad8b99e4-e1f5-40d9-8241-3aa31a82d35e {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1732.622178] env[67015]: DEBUG nova.compute.manager [req-9e2bb2b9-2e8f-4cc0-b051-03fd39d2028a req-64967803-b1dc-4069-a155-3160d71d246a service nova] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Refreshing instance network info cache due to event network-changed-ad8b99e4-e1f5-40d9-8241-3aa31a82d35e. {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 1732.622178] env[67015]: DEBUG oslo_concurrency.lockutils [req-9e2bb2b9-2e8f-4cc0-b051-03fd39d2028a req-64967803-b1dc-4069-a155-3160d71d246a service nova] Acquiring lock "refresh_cache-6daf9c76-9471-43ec-9dd6-aaa43efc391b" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1732.622178] env[67015]: DEBUG oslo_concurrency.lockutils [req-9e2bb2b9-2e8f-4cc0-b051-03fd39d2028a req-64967803-b1dc-4069-a155-3160d71d246a service nova] Acquired lock "refresh_cache-6daf9c76-9471-43ec-9dd6-aaa43efc391b" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1732.622178] env[67015]: DEBUG nova.network.neutron [req-9e2bb2b9-2e8f-4cc0-b051-03fd39d2028a req-64967803-b1dc-4069-a155-3160d71d246a service nova] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Refreshing network info cache for port ad8b99e4-e1f5-40d9-8241-3aa31a82d35e {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1732.671498] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114549, 'name': CreateVM_Task, 'duration_secs': 0.29847} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1732.671799] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1732.672437] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1732.672600] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1732.672931] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1732.673193] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee120f60-498e-4c4a-a890-e599230f1a46 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1732.677744] env[67015]: DEBUG oslo_vmware.api [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Waiting for the task: (returnval){
[ 1732.677744] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]528d07e7-64fb-46cb-d6dc-6bc822fbbca7"
[ 1732.677744] env[67015]: _type = "Task"
[ 1732.677744] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1732.685847] env[67015]: DEBUG oslo_vmware.api [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]528d07e7-64fb-46cb-d6dc-6bc822fbbca7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1732.862456] env[67015]: DEBUG nova.network.neutron [req-9e2bb2b9-2e8f-4cc0-b051-03fd39d2028a req-64967803-b1dc-4069-a155-3160d71d246a service nova] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Updated VIF entry in instance network info cache for port ad8b99e4-e1f5-40d9-8241-3aa31a82d35e. {{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1732.862806] env[67015]: DEBUG nova.network.neutron [req-9e2bb2b9-2e8f-4cc0-b051-03fd39d2028a req-64967803-b1dc-4069-a155-3160d71d246a service nova] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Updating instance_info_cache with network_info: [{"id": "ad8b99e4-e1f5-40d9-8241-3aa31a82d35e", "address": "fa:16:3e:e7:b5:e8", "network": {"id": "c2a2b5c1-97e4-4050-b354-fdaed8c9f0c9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1005456600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "caadf2a2334b4336a4bf7d1f0462adfe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad8b99e4-e1", "ovs_interfaceid": "ad8b99e4-e1f5-40d9-8241-3aa31a82d35e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1732.872169] env[67015]: DEBUG oslo_concurrency.lockutils [req-9e2bb2b9-2e8f-4cc0-b051-03fd39d2028a req-64967803-b1dc-4069-a155-3160d71d246a service nova] Releasing lock "refresh_cache-6daf9c76-9471-43ec-9dd6-aaa43efc391b" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1733.187713] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1733.188100] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1733.188200] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1733.509143] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1733.513773] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1733.513959] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1735.514453] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1735.514775] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Cleaning up deleted instances {{(pid=67015) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}}
[ 1735.525645] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] There are 0 instances to clean {{(pid=67015) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}}
[ 1736.526271] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1736.537421] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1736.537628] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1736.537797] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1736.537951] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1736.539066] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1de8616-d0c8-45ef-b8a5-42e3b800b14c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1736.547392] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2935cf0c-2a68-4e41-8ddd-6356c4ae25a4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1736.560705] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487c4230-3a7f-4706-97b0-e607b046cc14 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1736.566490] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a8c8513-a7f7-4476-adbf-d3f2b1cd3a08 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1736.595625] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181076MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1736.595761] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1736.595929] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1736.741390] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1736.741553] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6d104e2f-9924-4094-823d-a78c21acfc7b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1736.741684] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 944728f0-7db6-4cca-a51c-7acb5998cb12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1736.741823] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 41f47735-f679-4b30-8e30-f917dcf4db42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1736.741994] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1736.742139] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 199b0508-5b88-41b4-ae08-dcdabb656686 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1736.742260] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance f9cb0b88-053a-4a6d-be59-5aa202b6a4f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1736.742378] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance f7de465c-7557-41d0-b71a-ad0872c93745 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1736.742494] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 4d61f178-b532-4ddb-958f-68723d041497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1736.742610] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6daf9c76-9471-43ec-9dd6-aaa43efc391b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1736.753751] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance c1049b71-7c9b-4772-a889-fee93a62cf05 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1736.753983] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1736.754148] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1736.770850] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Refreshing inventories for resource provider 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}}
[ 1736.785644] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Updating ProviderTree inventory for provider 82311841-8ff3-4f49-9053-67c5a45ef771 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}}
[ 1736.785834] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Updating inventory in ProviderTree for provider 82311841-8ff3-4f49-9053-67c5a45ef771 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 1736.796578] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Refreshing aggregate associations for resource provider 82311841-8ff3-4f49-9053-67c5a45ef771, aggregates: None {{(pid=67015) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}}
[ 1736.814328] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Refreshing trait associations for resource provider 82311841-8ff3-4f49-9053-67c5a45ef771, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=67015) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}}
[ 1736.947106] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbcfa965-a11e-4643-84aa-4f4c0fa0e4d2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1736.954784] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88cb2546-7bd0-4625-b982-af407935da3e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1736.983310] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a91055f-6f9e-406a-8296-d9e7bc35dfb3 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1736.989906] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76c483ce-56fe-4f25-b3ca-6a5b4949c341 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1737.003099] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1737.011557] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1737.025449] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1737.025635] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.430s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1738.013654] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1738.013988] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1738.511065] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1742.513890] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1742.514366] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}}
[ 1743.514279] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1743.514579] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}}
[ 1743.514579] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1743.534726] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1743.534903] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1743.534998] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1743.535138] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1743.535262] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1743.535384] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1743.535505] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1743.535623] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1743.535741] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1743.535857] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1743.535974] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}}
[ 1748.515055] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1748.515055] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Cleaning up deleted instances with incomplete migration {{(pid=67015) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}}
[ 1752.514605] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1752.537565] env[67015]: DEBUG oslo_concurrency.lockutils [None req-18d07193-75b1-4200-9c85-be29f82e5e21 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquiring lock "f7de465c-7557-41d0-b71a-ad0872c93745" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1758.543265] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Acquiring lock "232b89d8-08a1-45af-91e6-1dc979880009" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1758.543601] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Lock "232b89d8-08a1-45af-91e6-1dc979880009" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1774.768952] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._sync_power_states {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1774.791153] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Getting list of instances from cluster (obj){
[ 1774.791153] env[67015]: value = "domain-c8"
[ 1774.791153] env[67015]: _type = "ClusterComputeResource"
[ 1774.791153] env[67015]: } {{(pid=67015) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}}
[ 1774.792696] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27068b46-1ba8-47f1-b24b-e07fde037788 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1774.810221] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Got total of 10 instances {{(pid=67015) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}}
[ 1774.810387] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8 {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}}
[ 1774.810581] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 6d104e2f-9924-4094-823d-a78c21acfc7b {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}}
[ 1774.810912] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 944728f0-7db6-4cca-a51c-7acb5998cb12 {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}}
[ 1774.811166] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 41f47735-f679-4b30-8e30-f917dcf4db42 {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}}
[ 1774.811339] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}}
[ 1774.811497] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 199b0508-5b88-41b4-ae08-dcdabb656686 {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}}
[ 1774.811651] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid f9cb0b88-053a-4a6d-be59-5aa202b6a4f7 {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}}
[ 1774.811803] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid f7de465c-7557-41d0-b71a-ad0872c93745 {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}}
[ 1774.811984] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 4d61f178-b532-4ddb-958f-68723d041497 {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}}
[ 1774.812158] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 6daf9c76-9471-43ec-9dd6-aaa43efc391b {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}}
[ 1774.812479] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "1ef0af78-e94a-44f0-8b4c-adb1ab733ed8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1774.812709] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "6d104e2f-9924-4094-823d-a78c21acfc7b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1774.812905] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "944728f0-7db6-4cca-a51c-7acb5998cb12" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1774.813116] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "41f47735-f679-4b30-8e30-f917dcf4db42" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1774.813343] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "8f61fa9e-82b6-401a-a7e0-fe4a54561f8d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1774.813557] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "199b0508-5b88-41b4-ae08-dcdabb656686" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1774.813763] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "f9cb0b88-053a-4a6d-be59-5aa202b6a4f7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1774.813957] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "f7de465c-7557-41d0-b71a-ad0872c93745" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1774.814166] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "4d61f178-b532-4ddb-958f-68723d041497" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1774.814357] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "6daf9c76-9471-43ec-9dd6-aaa43efc391b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1775.329636] env[67015]: WARNING oslo_vmware.rw_handles [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1775.329636] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1775.329636] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1775.329636] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 1775.329636] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1775.329636] env[67015]: ERROR oslo_vmware.rw_handles response.begin()
[ 1775.329636] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1775.329636] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 1775.329636] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1775.329636] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 1775.329636] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1775.329636] env[67015]: ERROR oslo_vmware.rw_handles
[ 1775.330137] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/f819b87d-5743-47b3-9dc6-be7fa4fdd65c/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1775.332146] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1775.332389] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Copying Virtual Disk [datastore2] vmware_temp/f819b87d-5743-47b3-9dc6-be7fa4fdd65c/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/f819b87d-5743-47b3-9dc6-be7fa4fdd65c/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1775.332660] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-13ced9a4-cb02-45cf-8f81-1959a3689245 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1775.340832] env[67015]: DEBUG oslo_vmware.api [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Waiting for the task: (returnval){
[ 1775.340832] env[67015]: value = "task-3114550"
[ 1775.340832] env[67015]: _type = "Task"
[ 1775.340832] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1775.348783] env[67015]: DEBUG oslo_vmware.api [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Task: {'id': task-3114550, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1775.852086] env[67015]: DEBUG oslo_vmware.exceptions [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Fault InvalidArgument not matched. {{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1775.852463] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1775.853460] env[67015]: ERROR nova.compute.manager [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1775.853460] env[67015]: Faults: ['InvalidArgument']
[ 1775.853460] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Traceback (most recent call last):
[ 1775.853460] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1775.853460] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] yield resources
[ 1775.853460] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1775.853460] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] self.driver.spawn(context, instance, image_meta,
[ 1775.853460] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1775.853460] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1775.853460] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1775.853460] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] self._fetch_image_if_missing(context, vi)
[ 1775.853460] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1775.854268] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] image_cache(vi, tmp_image_ds_loc)
[ 1775.854268] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1775.854268] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] vm_util.copy_virtual_disk(
[ 1775.854268] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1775.854268] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] session._wait_for_task(vmdk_copy_task)
[ 1775.854268] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1775.854268] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] return self.wait_for_task(task_ref)
[ 1775.854268] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1775.854268] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] return evt.wait()
[ 1775.854268] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1775.854268] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] result = hub.switch()
[ 1775.854268] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1775.854268] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] return self.greenlet.switch()
[ 1775.854953] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1775.854953] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] self.f(*self.args, **self.kw)
[ 1775.854953] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1775.854953] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] raise exceptions.translate_fault(task_info.error)
[ 1775.854953] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1775.854953] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Faults: ['InvalidArgument']
[ 1775.854953] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8]
[ 1775.854953] env[67015]: INFO nova.compute.manager [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Terminating instance
[ 1775.855443] env[67015]: DEBUG oslo_concurrency.lockutils [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1775.855443] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1775.855575] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c422f064-a7a2-486b-80bf-92f6a711c8be {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1775.858311] env[67015]: DEBUG nova.compute.manager [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1775.858548] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1775.859561] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-971b1116-3dbd-4504-b4d4-b3269a28e9c0 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1775.867752] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1775.869177] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9de1b7fb-f5d2-4899-acaa-001b4016ae87 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1775.871106] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1775.871358] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1775.872124] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d58fb3f4-bba0-4625-8310-8c779778d756 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1775.878393] env[67015]: DEBUG oslo_vmware.api [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Waiting for the task: (returnval){
[ 1775.878393] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]522a968c-4a79-2ef0-a02c-e82d1a2a4c76"
[ 1775.878393] env[67015]: _type = "Task"
[ 1775.878393] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1775.886779] env[67015]: DEBUG oslo_vmware.api [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]522a968c-4a79-2ef0-a02c-e82d1a2a4c76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1775.948614] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1775.948872] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1775.949085] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Deleting the datastore file [datastore2] 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8 {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1775.949368] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a6d4daa0-5ed6-48ec-aacd-c81d1f7ae527 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1775.956530] env[67015]: DEBUG oslo_vmware.api [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Waiting for the task: (returnval){
[ 1775.956530] env[67015]: value = "task-3114552"
[ 1775.956530] env[67015]: _type = "Task"
[ 1775.956530] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1775.964810] env[67015]: DEBUG oslo_vmware.api [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Task: {'id': task-3114552, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1776.388904] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1776.389376] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Creating directory with path [datastore2] vmware_temp/cd82ce11-6ff6-430b-b8eb-bc8c9fd74950/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1776.389468] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e40de06-e0e4-450b-9c8a-e6f91033aa34 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1776.403024] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Created directory with path [datastore2] vmware_temp/cd82ce11-6ff6-430b-b8eb-bc8c9fd74950/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1776.403295] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Fetch image to [datastore2] vmware_temp/cd82ce11-6ff6-430b-b8eb-bc8c9fd74950/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1776.403508] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/cd82ce11-6ff6-430b-b8eb-bc8c9fd74950/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1776.404235] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06155cea-0011-4324-a7b3-3bedae6fc6da {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1776.411729] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbdb7843-4745-460b-b19c-6ad6f5e62b83 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1776.421428] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aaa9768-7a5a-44ee-a4fc-a2e640273f90 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1776.455159] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad43578-b554-496c-958c-60978203b0d3 {{(pid=67015) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.461912] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cf3573e6-ca63-4430-b166-6404b29e3020 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.466445] env[67015]: DEBUG oslo_vmware.api [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Task: {'id': task-3114552, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069436} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.467061] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1776.467244] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1776.468377] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1776.468377] env[67015]: INFO nova.compute.manager [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Took 0.61 seconds to destroy the instance on the hypervisor. 
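The failure driving this teardown is the VimFaultException traceback above: vCenter rejected the 'fileType' parameter of the CopyVirtualDisk_Task with an InvalidArgument fault while _cache_sparse_image was copying the fetched tmp-sparse.vmdk, and oslo.vmware surfaces that once its task-polling loop observes the error state (the "_poll_task ... progress is 0%" lines). A minimal sketch of that polling pattern, assuming a pyVmomi-style task object exposing info.state, info.result and info.error (simplified from oslo_vmware.api.wait_for_task, not the exact implementation):

    import time

    class TaskFailed(Exception):
        """Simplified stand-in for oslo_vmware.exceptions.VimFaultException."""

    def wait_for_task(task, interval=0.5):
        # Poll the vCenter task until it reaches a terminal state; this is
        # the loop behind the repeated "_poll_task" DEBUG lines in the log.
        while True:
            state = task.info.state
            if state == 'success':
                return task.info.result
            if state == 'error':
                # oslo.vmware translates the server-side fault here; in this
                # log the translated result is "A specified parameter was not
                # correct: fileType", Faults: ['InvalidArgument'].
                raise TaskFailed(task.info.error.localizedMessage)
            time.sleep(interval)

In the real driver the loop runs inside a green-thread looping call (visible in the traceback as oslo_vmware/common/loopingcall.py), but the terminal-state logic is the same.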
[ 1776.470171] env[67015]: DEBUG nova.compute.claims [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1776.470348] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1776.470557] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1776.487488] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1776.563304] env[67015]: DEBUG oslo_vmware.rw_handles [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cd82ce11-6ff6-430b-b8eb-bc8c9fd74950/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1776.626839] env[67015]: DEBUG oslo_vmware.rw_handles [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1776.626839] env[67015]: DEBUG oslo_vmware.rw_handles [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cd82ce11-6ff6-430b-b8eb-bc8c9fd74950/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1776.778382] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ac1770-919b-4a07-a717-1ecb9796a78f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.789913] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08311adb-ad2e-435a-9a0c-85299d2598e1 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.840105] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e0b9fc1-385d-477c-9806-b617e3ee9338 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.851198] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c35276b0-0e17-472d-8db7-d36bb618fe25 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.873808] env[67015]: DEBUG nova.compute.provider_tree [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1776.885082] env[67015]: DEBUG nova.scheduler.client.report [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1776.905545] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.435s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1776.906383] env[67015]: ERROR nova.compute.manager [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1776.906383] env[67015]: Faults: ['InvalidArgument'] [ 1776.906383] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Traceback (most recent call last): [ 1776.906383] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1776.906383] 
env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] self.driver.spawn(context, instance, image_meta, [ 1776.906383] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1776.906383] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1776.906383] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1776.906383] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] self._fetch_image_if_missing(context, vi) [ 1776.906383] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1776.906383] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] image_cache(vi, tmp_image_ds_loc) [ 1776.906383] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1776.906800] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] vm_util.copy_virtual_disk( [ 1776.906800] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1776.906800] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] session._wait_for_task(vmdk_copy_task) [ 1776.906800] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1776.906800] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] return self.wait_for_task(task_ref) [ 1776.906800] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1776.906800] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] return evt.wait() [ 1776.906800] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1776.906800] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] result = hub.switch() [ 1776.906800] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1776.906800] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] return self.greenlet.switch() [ 1776.906800] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1776.906800] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] self.f(*self.args, **self.kw) [ 1776.907189] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1776.907189] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] raise exceptions.translate_fault(task_info.error) [ 1776.907189] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1776.907189] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Faults: ['InvalidArgument'] [ 1776.907189] env[67015]: ERROR nova.compute.manager [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] [ 1776.907446] env[67015]: DEBUG nova.compute.utils [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1776.909625] env[67015]: DEBUG nova.compute.manager [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Build of instance 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8 was re-scheduled: A specified parameter was not correct: fileType [ 1776.909625] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1776.910228] env[67015]: DEBUG nova.compute.manager [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1776.910511] env[67015]: DEBUG nova.compute.manager [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1776.910796] env[67015]: DEBUG nova.compute.manager [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1776.911310] env[67015]: DEBUG nova.network.neutron [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1777.646801] env[67015]: DEBUG nova.network.neutron [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1777.661551] env[67015]: INFO nova.compute.manager [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Took 0.75 seconds to deallocate network for instance. [ 1777.769498] env[67015]: INFO nova.scheduler.client.report [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Deleted allocations for instance 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8 [ 1777.792592] env[67015]: DEBUG oslo_concurrency.lockutils [None req-3f6ffd37-eb17-4894-8ecc-d87705a60ee5 tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Lock "1ef0af78-e94a-44f0-8b4c-adb1ab733ed8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 600.316s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1777.794827] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e2a5392e-bcfd-49f7-b194-7c8a25d488db tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Lock "1ef0af78-e94a-44f0-8b4c-adb1ab733ed8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 404.476s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1777.795021] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e2a5392e-bcfd-49f7-b194-7c8a25d488db tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Acquiring lock "1ef0af78-e94a-44f0-8b4c-adb1ab733ed8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1777.795575] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e2a5392e-bcfd-49f7-b194-7c8a25d488db tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Lock "1ef0af78-e94a-44f0-8b4c-adb1ab733ed8-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1777.795720] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e2a5392e-bcfd-49f7-b194-7c8a25d488db tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Lock "1ef0af78-e94a-44f0-8b4c-adb1ab733ed8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1777.797906] env[67015]: INFO nova.compute.manager [None req-e2a5392e-bcfd-49f7-b194-7c8a25d488db tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Terminating instance [ 1777.800372] env[67015]: DEBUG nova.compute.manager [None req-e2a5392e-bcfd-49f7-b194-7c8a25d488db tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1777.800567] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e2a5392e-bcfd-49f7-b194-7c8a25d488db tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1777.800836] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6edd79bf-3dfd-4039-b335-2229de1ce805 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.810687] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4abc43ea-8795-4a86-893c-29ee03da6828 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.821851] env[67015]: DEBUG nova.compute.manager [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1777.842583] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-e2a5392e-bcfd-49f7-b194-7c8a25d488db tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8 could not be found. 
[ 1777.842756] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e2a5392e-bcfd-49f7-b194-7c8a25d488db tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1777.842919] env[67015]: INFO nova.compute.manager [None req-e2a5392e-bcfd-49f7-b194-7c8a25d488db tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1777.843193] env[67015]: DEBUG oslo.service.loopingcall [None req-e2a5392e-bcfd-49f7-b194-7c8a25d488db tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1777.843415] env[67015]: DEBUG nova.compute.manager [-] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1777.843510] env[67015]: DEBUG nova.network.neutron [-] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1777.867804] env[67015]: DEBUG nova.network.neutron [-] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1777.871226] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1777.871460] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1777.873024] env[67015]: INFO nova.compute.claims [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1777.876251] env[67015]: INFO nova.compute.manager [-] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] Took 0.03 seconds to deallocate network for instance. 
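The claim for the incoming ServerRescue instance and the abort of the failed build both funnel through the single "compute_resources" lock, which is why oslo_concurrency.lockutils logs matched acquire/release pairs with their wait and hold times. The decorator behind those lines looks roughly like this, using the real oslo.concurrency API (the lock name matches the log; the claim body is a placeholder, not Nova's actual accounting):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(resources, flavor):
        # Runs with "compute_resources" held, so a concurrent
        # abort_instance_claim (as seen earlier in this log) cannot
        # interleave with this accounting.
        resources['vcpus_used'] += flavor['vcpus']
        resources['memory_mb_used'] += flavor['memory_mb']
        return resources

By default the lock is an in-process semaphore keyed by name, which is sufficient here because the resource tracker runs inside a single nova-compute process (pid 67015 throughout).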
[ 1777.960183] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e2a5392e-bcfd-49f7-b194-7c8a25d488db tempest-ServersNegativeTestJSON-1435226129 tempest-ServersNegativeTestJSON-1435226129-project-member] Lock "1ef0af78-e94a-44f0-8b4c-adb1ab733ed8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.165s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1777.961054] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "1ef0af78-e94a-44f0-8b4c-adb1ab733ed8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 3.149s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1777.961382] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 1ef0af78-e94a-44f0-8b4c-adb1ab733ed8] During sync_power_state the instance has a pending task (deleting). Skip. [ 1777.961548] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "1ef0af78-e94a-44f0-8b4c-adb1ab733ed8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1778.056406] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39850e15-70e9-4f5e-956d-120fe9248686 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.065441] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2baaf945-9c44-41ca-be1e-9e8d1eee058b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.094308] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0a5e66-a1e5-4738-a84a-3756f62f6978 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.101049] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a44a4df2-9fc1-45a7-96c5-39e5e438fd7e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.113911] env[67015]: DEBUG nova.compute.provider_tree [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1778.124819] env[67015]: DEBUG nova.scheduler.client.report [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1778.139371] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.268s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1778.139631] env[67015]: DEBUG nova.compute.manager [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1778.171338] env[67015]: DEBUG nova.compute.utils [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1778.172509] env[67015]: DEBUG nova.compute.manager [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1778.172678] env[67015]: DEBUG nova.network.neutron [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1778.181730] env[67015]: DEBUG nova.compute.manager [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1778.246193] env[67015]: DEBUG nova.compute.manager [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Start spawning the instance on the hypervisor. 
{{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1778.255984] env[67015]: DEBUG nova.policy [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e6a9cb508b1e49b7b08e8a7424a355bd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0ec3887b74a74400a1a460042adb73ae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 1778.276084] env[67015]: DEBUG nova.virt.hardware [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1778.276470] env[67015]: DEBUG nova.virt.hardware [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1778.276659] env[67015]: DEBUG nova.virt.hardware [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1778.276848] env[67015]: DEBUG nova.virt.hardware [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1778.276996] env[67015]: DEBUG nova.virt.hardware [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1778.277158] env[67015]: DEBUG nova.virt.hardware [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1778.277366] env[67015]: DEBUG nova.virt.hardware [None 
req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1778.277526] env[67015]: DEBUG nova.virt.hardware [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1778.277689] env[67015]: DEBUG nova.virt.hardware [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1778.277849] env[67015]: DEBUG nova.virt.hardware [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1778.278033] env[67015]: DEBUG nova.virt.hardware [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1778.278894] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa19a36-24ec-47d6-8fa9-26ca1ea50f8c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.286729] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ecd60a-1ca7-46fc-8485-c17b4ab1e915 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.602789] env[67015]: DEBUG nova.network.neutron [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Successfully created port: 55b7edc3-9e8e-495e-9c67-4de269e305cb {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1779.226409] env[67015]: DEBUG nova.network.neutron [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Successfully updated port: 55b7edc3-9e8e-495e-9c67-4de269e305cb {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1779.238695] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Acquiring lock "refresh_cache-c1049b71-7c9b-4772-a889-fee93a62cf05" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1779.238870] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 
tempest-ServerRescueTestJSON-1505290448-project-member] Acquired lock "refresh_cache-c1049b71-7c9b-4772-a889-fee93a62cf05" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1779.239071] env[67015]: DEBUG nova.network.neutron [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1779.289141] env[67015]: DEBUG nova.network.neutron [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1779.452281] env[67015]: DEBUG nova.network.neutron [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Updating instance_info_cache with network_info: [{"id": "55b7edc3-9e8e-495e-9c67-4de269e305cb", "address": "fa:16:3e:d2:35:03", "network": {"id": "7c1de6e8-ed41-4033-8528-9bf535f253da", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1265859877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "0ec3887b74a74400a1a460042adb73ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55b7edc3-9e", "ovs_interfaceid": "55b7edc3-9e8e-495e-9c67-4de269e305cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1779.469013] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Releasing lock "refresh_cache-c1049b71-7c9b-4772-a889-fee93a62cf05" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1779.469013] env[67015]: DEBUG nova.compute.manager [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Instance network_info: |[{"id": "55b7edc3-9e8e-495e-9c67-4de269e305cb", "address": "fa:16:3e:d2:35:03", "network": {"id": "7c1de6e8-ed41-4033-8528-9bf535f253da", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1265859877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": 
"fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "0ec3887b74a74400a1a460042adb73ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55b7edc3-9e", "ovs_interfaceid": "55b7edc3-9e8e-495e-9c67-4de269e305cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1779.469207] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:35:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271fe7a0-dfd7-409b-920a-cf83ef1a86a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '55b7edc3-9e8e-495e-9c67-4de269e305cb', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1779.475564] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Creating folder: Project (0ec3887b74a74400a1a460042adb73ae). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1779.476269] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-486b7498-e67a-4cbf-8cae-016aeecaec9f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.485916] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Created folder: Project (0ec3887b74a74400a1a460042adb73ae) in parent group-v623108. [ 1779.485916] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Creating folder: Instances. Parent ref: group-v623215. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1779.486138] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4aae16a2-f1ae-4107-80c2-7ad1e6e69b1c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.497340] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Created folder: Instances in parent group-v623215. [ 1779.497340] env[67015]: DEBUG oslo.service.loopingcall [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1779.497340] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1779.497340] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cac23952-e54c-486f-af2f-20be64de57c9 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.516265] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1779.516265] env[67015]: value = "task-3114555" [ 1779.516265] env[67015]: _type = "Task" [ 1779.516265] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.524633] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114555, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.693920] env[67015]: DEBUG nova.compute.manager [req-ecb0c7c6-0b81-428e-8093-6a0c0e5384a2 req-6f54a7ee-90df-4a8a-9860-33a728a50ab6 service nova] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Received event network-vif-plugged-55b7edc3-9e8e-495e-9c67-4de269e305cb {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1779.696719] env[67015]: DEBUG oslo_concurrency.lockutils [req-ecb0c7c6-0b81-428e-8093-6a0c0e5384a2 req-6f54a7ee-90df-4a8a-9860-33a728a50ab6 service nova] Acquiring lock "c1049b71-7c9b-4772-a889-fee93a62cf05-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.696719] env[67015]: DEBUG oslo_concurrency.lockutils [req-ecb0c7c6-0b81-428e-8093-6a0c0e5384a2 req-6f54a7ee-90df-4a8a-9860-33a728a50ab6 service nova] Lock "c1049b71-7c9b-4772-a889-fee93a62cf05-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.696719] env[67015]: DEBUG oslo_concurrency.lockutils [req-ecb0c7c6-0b81-428e-8093-6a0c0e5384a2 req-6f54a7ee-90df-4a8a-9860-33a728a50ab6 service nova] Lock "c1049b71-7c9b-4772-a889-fee93a62cf05-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.696719] env[67015]: DEBUG nova.compute.manager [req-ecb0c7c6-0b81-428e-8093-6a0c0e5384a2 req-6f54a7ee-90df-4a8a-9860-33a728a50ab6 service nova] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] No waiting events found dispatching network-vif-plugged-55b7edc3-9e8e-495e-9c67-4de269e305cb {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1779.697067] env[67015]: WARNING nova.compute.manager [req-ecb0c7c6-0b81-428e-8093-6a0c0e5384a2 req-6f54a7ee-90df-4a8a-9860-33a728a50ab6 service nova] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Received unexpected event network-vif-plugged-55b7edc3-9e8e-495e-9c67-4de269e305cb for instance with vm_state building and task_state spawning. 
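The "unexpected event" warning just above is benign: neutron's network-vif-plugged notification arrived before the compute manager registered a waiter for it (the spawn is still mid-CreateVM_Task), so pop_instance_event finds nothing to wake and the event is logged rather than lost. A toy version of that waiter registry, with hypothetical names, assuming per-instance event keys like 'network-vif-plugged-<port_id>':

    import threading

    class InstanceEvents:
        """Minimal stand-in for nova.compute.manager.InstanceEvents."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}  # (instance_uuid, event_key) -> threading.Event

        def prepare(self, instance_uuid, event_key):
            # Register interest before starting the operation that will
            # trigger the external event, then wait() on the returned Event.
            waiter = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_key)] = waiter
            return waiter

        def pop(self, instance_uuid, event_key):
            # Called from the notification handler; None means nobody was
            # waiting, which the real code logs as an unexpected event.
            with self._lock:
                return self._waiters.pop((instance_uuid, event_key), None)

A handler receiving the neutron event would call pop(...) and set() the waiter if one exists; the None branch is exactly the "No waiting events found dispatching" plus "Received unexpected event" pair seen above.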
[ 1779.697067] env[67015]: DEBUG nova.compute.manager [req-ecb0c7c6-0b81-428e-8093-6a0c0e5384a2 req-6f54a7ee-90df-4a8a-9860-33a728a50ab6 service nova] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Received event network-changed-55b7edc3-9e8e-495e-9c67-4de269e305cb {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1779.697067] env[67015]: DEBUG nova.compute.manager [req-ecb0c7c6-0b81-428e-8093-6a0c0e5384a2 req-6f54a7ee-90df-4a8a-9860-33a728a50ab6 service nova] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Refreshing instance network info cache due to event network-changed-55b7edc3-9e8e-495e-9c67-4de269e305cb. {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1779.697067] env[67015]: DEBUG oslo_concurrency.lockutils [req-ecb0c7c6-0b81-428e-8093-6a0c0e5384a2 req-6f54a7ee-90df-4a8a-9860-33a728a50ab6 service nova] Acquiring lock "refresh_cache-c1049b71-7c9b-4772-a889-fee93a62cf05" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1779.697067] env[67015]: DEBUG oslo_concurrency.lockutils [req-ecb0c7c6-0b81-428e-8093-6a0c0e5384a2 req-6f54a7ee-90df-4a8a-9860-33a728a50ab6 service nova] Acquired lock "refresh_cache-c1049b71-7c9b-4772-a889-fee93a62cf05" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1779.697386] env[67015]: DEBUG nova.network.neutron [req-ecb0c7c6-0b81-428e-8093-6a0c0e5384a2 req-6f54a7ee-90df-4a8a-9860-33a728a50ab6 service nova] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Refreshing network info cache for port 55b7edc3-9e8e-495e-9c67-4de269e305cb {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1779.958789] env[67015]: DEBUG nova.network.neutron [req-ecb0c7c6-0b81-428e-8093-6a0c0e5384a2 req-6f54a7ee-90df-4a8a-9860-33a728a50ab6 service nova] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Updated VIF entry in instance network info cache for port 55b7edc3-9e8e-495e-9c67-4de269e305cb. 
{{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1779.959159] env[67015]: DEBUG nova.network.neutron [req-ecb0c7c6-0b81-428e-8093-6a0c0e5384a2 req-6f54a7ee-90df-4a8a-9860-33a728a50ab6 service nova] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Updating instance_info_cache with network_info: [{"id": "55b7edc3-9e8e-495e-9c67-4de269e305cb", "address": "fa:16:3e:d2:35:03", "network": {"id": "7c1de6e8-ed41-4033-8528-9bf535f253da", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1265859877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "0ec3887b74a74400a1a460042adb73ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55b7edc3-9e", "ovs_interfaceid": "55b7edc3-9e8e-495e-9c67-4de269e305cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1779.969450] env[67015]: DEBUG oslo_concurrency.lockutils [req-ecb0c7c6-0b81-428e-8093-6a0c0e5384a2 req-6f54a7ee-90df-4a8a-9860-33a728a50ab6 service nova] Releasing lock "refresh_cache-c1049b71-7c9b-4772-a889-fee93a62cf05" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1780.025942] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114555, 'name': CreateVM_Task, 'duration_secs': 0.290043} completed successfully. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.026460] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1780.027436] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1780.027606] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1780.028203] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1780.028472] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b53ece2a-be92-4d98-b3e3-e60cab85b484 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.032952] env[67015]: DEBUG oslo_vmware.api [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Waiting for the task: (returnval){ [ 1780.032952] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]525e7757-b2ee-5137-19ae-c96033adb1de" [ 1780.032952] env[67015]: _type = "Task" [ 1780.032952] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.040914] env[67015]: DEBUG oslo_vmware.api [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]525e7757-b2ee-5137-19ae-c96033adb1de, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.543398] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1780.543685] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1780.543879] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1781.970113] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Acquiring lock "8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1781.970736] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Lock "8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1792.292202] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ffe77df2-b44e-4b73-bc36-e12a238eb823 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "4d61f178-b532-4ddb-958f-68723d041497" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1793.559391] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1793.559684] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1795.509589] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1795.513309] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1797.513866] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1797.526063] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1797.526063] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1797.526358] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1797.526358] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1797.527408] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fea1df3-061c-4707-8b93-e4bfb6b4dfa4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.536199] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-784c12e7-5d41-4ed5-ad5b-0ba2097b04d1 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.549902] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-147b0ce2-5256-4327-b247-fcf008601c88 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.556070] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9758066-8612-4134-b4e9-1ef28a756190 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.584158] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181025MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1797.584319] env[67015]: DEBUG 
oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1797.584483] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1797.656633] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6d104e2f-9924-4094-823d-a78c21acfc7b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1797.656809] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 944728f0-7db6-4cca-a51c-7acb5998cb12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1797.656939] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 41f47735-f679-4b30-8e30-f917dcf4db42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1797.657077] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1797.657205] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 199b0508-5b88-41b4-ae08-dcdabb656686 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1797.657323] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance f9cb0b88-053a-4a6d-be59-5aa202b6a4f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1797.657440] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance f7de465c-7557-41d0-b71a-ad0872c93745 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1797.657557] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 4d61f178-b532-4ddb-958f-68723d041497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1797.657671] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6daf9c76-9471-43ec-9dd6-aaa43efc391b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1797.657784] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance c1049b71-7c9b-4772-a889-fee93a62cf05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1797.668463] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 232b89d8-08a1-45af-91e6-1dc979880009 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1797.680253] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1797.680521] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1797.680648] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1797.811754] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c841b41e-6ded-49a7-bdbf-e6bfbd0b3b72 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.819734] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-885126b9-c008-41d0-b4d5-31bb6d78946d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.848216] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86debba7-dadf-4bcc-b62b-feeb3cda17d0 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.854748] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aae943a-cdcb-4b38-a52c-d399dd3c5d35 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.867008] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1797.875696] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1797.888823] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1797.889007] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.305s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.889660] env[67015]: DEBUG oslo_service.periodic_task [None 
req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1799.514488] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1803.513969] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1803.514229] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1804.515259] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1804.515647] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1804.515647] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1804.536837] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1804.537029] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1804.537117] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1804.537247] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1804.537369] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Skipping network cache update for instance because it is Building. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1804.537488] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1804.537609] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1804.537728] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1804.537845] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1804.537963] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1804.538099] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1816.268478] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b4cfd69f-b7dc-4e5d-a09d-86bac8dac4c0 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Acquiring lock "6daf9c76-9471-43ec-9dd6-aaa43efc391b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.908126] env[67015]: WARNING oslo_vmware.rw_handles [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1825.908126] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1825.908126] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1825.908126] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1825.908126] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1825.908126] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 1825.908126] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1825.908126] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1825.908126] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1825.908126] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1825.908126] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1825.908126] env[67015]: ERROR oslo_vmware.rw_handles [ 1825.908737] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/cd82ce11-6ff6-430b-b8eb-bc8c9fd74950/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1825.910639] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1825.910880] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Copying Virtual Disk [datastore2] vmware_temp/cd82ce11-6ff6-430b-b8eb-bc8c9fd74950/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/cd82ce11-6ff6-430b-b8eb-bc8c9fd74950/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1825.911182] env[67015]: DEBUG oslo_vmware.service [-] Invoking 
VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4649d982-9885-4616-8335-0bfdf415e021 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.920182] env[67015]: DEBUG oslo_vmware.api [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Waiting for the task: (returnval){ [ 1825.920182] env[67015]: value = "task-3114556" [ 1825.920182] env[67015]: _type = "Task" [ 1825.920182] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.927728] env[67015]: DEBUG oslo_vmware.api [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Task: {'id': task-3114556, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.430629] env[67015]: DEBUG oslo_vmware.exceptions [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Fault InvalidArgument not matched. {{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1826.430919] env[67015]: DEBUG oslo_concurrency.lockutils [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1826.431520] env[67015]: ERROR nova.compute.manager [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1826.431520] env[67015]: Faults: ['InvalidArgument'] [ 1826.431520] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Traceback (most recent call last): [ 1826.431520] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1826.431520] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] yield resources [ 1826.431520] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1826.431520] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] self.driver.spawn(context, instance, image_meta, [ 1826.431520] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1826.431520] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1826.431520] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1826.431520] env[67015]: ERROR nova.compute.manager [instance: 
6d104e2f-9924-4094-823d-a78c21acfc7b] self._fetch_image_if_missing(context, vi) [ 1826.431520] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1826.431520] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] image_cache(vi, tmp_image_ds_loc) [ 1826.431988] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1826.431988] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] vm_util.copy_virtual_disk( [ 1826.431988] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1826.431988] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] session._wait_for_task(vmdk_copy_task) [ 1826.431988] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1826.431988] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] return self.wait_for_task(task_ref) [ 1826.431988] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1826.431988] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] return evt.wait() [ 1826.431988] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1826.431988] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] result = hub.switch() [ 1826.431988] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1826.431988] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] return self.greenlet.switch() [ 1826.431988] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1826.432444] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] self.f(*self.args, **self.kw) [ 1826.432444] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1826.432444] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] raise exceptions.translate_fault(task_info.error) [ 1826.432444] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1826.432444] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Faults: ['InvalidArgument'] [ 1826.432444] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] [ 1826.432444] env[67015]: INFO nova.compute.manager 
[None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Terminating instance [ 1826.433377] env[67015]: DEBUG oslo_concurrency.lockutils [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1826.433608] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1826.433850] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4fa13486-a332-4779-a3f8-35672a6b5c52 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.435971] env[67015]: DEBUG nova.compute.manager [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1826.436178] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1826.436894] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4d10f1b-b789-4e71-a973-3c82b2e2aa81 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.443569] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1826.443792] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0b5376cb-44f2-408a-a5b5-27062ae8e798 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.445775] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1826.445945] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1826.446841] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83f9c0f2-74dc-4094-ba1c-0eed349341ff {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.451178] env[67015]: DEBUG oslo_vmware.api [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Waiting for the task: (returnval){ [ 1826.451178] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52819c28-a6ec-03a1-145c-3cff9af55644" [ 1826.451178] env[67015]: _type = "Task" [ 1826.451178] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.458065] env[67015]: DEBUG oslo_vmware.api [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52819c28-a6ec-03a1-145c-3cff9af55644, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.521086] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1826.521363] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1826.521546] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Deleting the datastore file [datastore2] 6d104e2f-9924-4094-823d-a78c21acfc7b {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1826.521811] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-300254eb-41ad-45b0-887f-3ee1785a055f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.528360] env[67015]: DEBUG oslo_vmware.api [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Waiting for the task: (returnval){ [ 1826.528360] env[67015]: value = "task-3114558" [ 1826.528360] env[67015]: _type = "Task" [ 1826.528360] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.536583] env[67015]: DEBUG oslo_vmware.api [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Task: {'id': task-3114558, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.961394] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1826.961739] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Creating directory with path [datastore2] vmware_temp/d8507beb-1eb7-47ce-9ee8-1dcaad606bcf/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1826.961873] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cf0acc80-cae5-406d-8f9d-a6d4038b4cfb {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.972869] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Created directory with path [datastore2] vmware_temp/d8507beb-1eb7-47ce-9ee8-1dcaad606bcf/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1826.973062] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Fetch image to [datastore2] vmware_temp/d8507beb-1eb7-47ce-9ee8-1dcaad606bcf/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1826.973238] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/d8507beb-1eb7-47ce-9ee8-1dcaad606bcf/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1826.973948] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50799661-daa2-402b-873a-1d689f59215f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.980100] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee6ff9ea-2cc6-4d0f-84a8-ea96df5723cf {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.988676] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8908aaf-da98-46f3-8fb3-7f8c913b55d0 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.020245] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06fe28ab-fbb1-4003-83cf-78bf0076990f 
{{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.025892] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-60890d02-1db8-4f26-b5d4-a982e66f054f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.035996] env[67015]: DEBUG oslo_vmware.api [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Task: {'id': task-3114558, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066169} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.036247] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1827.036431] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1827.036602] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1827.036773] env[67015]: INFO nova.compute.manager [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Took 0.60 seconds to destroy the instance on the hypervisor. 
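[editor's note] The UnregisterVM / DeleteDatastoreFile_Task records above follow oslo.vmware's invoke-then-poll pattern: a *_Task method is invoked, then wait_for_task polls the task object, logging "progress is N%." until the task reaches success or raises a translated fault (as the CopyVirtualDisk_Task above did with InvalidArgument). The following is a minimal self-contained sketch of that loop, not oslo.vmware itself; TaskInfo, fetch_task_info, and TaskFailed are illustrative stand-ins for the real PropertyCollector round trip.

# Illustrative sketch only -- mimics the wait_for_task/_poll_task
# pattern visible in this log; all names here are hypothetical.
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str           # "running", "success", or "error"
    progress: int        # percent complete
    error: str | None = None

class TaskFailed(Exception):
    pass

def wait_for_task(fetch_task_info, task_id, poll_interval=0.5, timeout=300.0):
    """Poll fetch_task_info(task_id) until the task finishes.

    Mirrors the log lines of the form
    "Task: {'id': task-..., 'name': ...} progress is N%."
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info(task_id)
        print(f"Task: {task_id!r} progress is {info.progress}%.")
        if info.state == "success":
            return info
        if info.state == "error":
            # oslo.vmware translates the VIM fault at this point
            # (e.g. the InvalidArgument fault seen above).
            raise TaskFailed(info.error or "unknown fault")
        time.sleep(poll_interval)
    raise TimeoutError(f"task {task_id!r} did not complete in {timeout}s")

The real session object also re-establishes its login when a poll fails with a session fault, which is why the log shows the "oslo_vmware_api_lock" session lock around session creation; this sketch omits that.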
[ 1827.038846] env[67015]: DEBUG nova.compute.claims [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1827.039031] env[67015]: DEBUG oslo_concurrency.lockutils [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.039259] env[67015]: DEBUG oslo_concurrency.lockutils [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.047926] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1827.216078] env[67015]: DEBUG oslo_vmware.rw_handles [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d8507beb-1eb7-47ce-9ee8-1dcaad606bcf/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1827.275742] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd9f9761-2a12-43f0-b74b-0fd934a57789 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.280537] env[67015]: DEBUG oslo_vmware.rw_handles [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1827.280712] env[67015]: DEBUG oslo_vmware.rw_handles [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d8507beb-1eb7-47ce-9ee8-1dcaad606bcf/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1827.284303] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e82ad3-6310-4497-989a-2e7b6fe98ec5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.313644] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d275ff86-97f3-405f-bf18-9d4f6925e8a5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.320537] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b5eb3b-3095-4e73-bcab-d79f4ff52559 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.333496] env[67015]: DEBUG nova.compute.provider_tree [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1827.342567] env[67015]: DEBUG nova.scheduler.client.report [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1827.358731] env[67015]: DEBUG oslo_concurrency.lockutils [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.319s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1827.359283] env[67015]: ERROR nova.compute.manager [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1827.359283] env[67015]: Faults: ['InvalidArgument'] [ 1827.359283] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Traceback (most recent call last): [ 1827.359283] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1827.359283] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] self.driver.spawn(context, instance, image_meta, [ 1827.359283] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1827.359283] env[67015]: ERROR 
nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1827.359283] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1827.359283] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] self._fetch_image_if_missing(context, vi) [ 1827.359283] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1827.359283] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] image_cache(vi, tmp_image_ds_loc) [ 1827.359283] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1827.359635] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] vm_util.copy_virtual_disk( [ 1827.359635] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1827.359635] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] session._wait_for_task(vmdk_copy_task) [ 1827.359635] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1827.359635] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] return self.wait_for_task(task_ref) [ 1827.359635] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1827.359635] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] return evt.wait() [ 1827.359635] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1827.359635] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] result = hub.switch() [ 1827.359635] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1827.359635] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] return self.greenlet.switch() [ 1827.359635] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1827.359635] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] self.f(*self.args, **self.kw) [ 1827.359985] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1827.359985] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] raise exceptions.translate_fault(task_info.error) [ 1827.359985] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1827.359985] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Faults: ['InvalidArgument'] [ 1827.359985] env[67015]: ERROR nova.compute.manager [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] [ 1827.359985] env[67015]: DEBUG nova.compute.utils [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1827.364382] env[67015]: DEBUG nova.compute.manager [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Build of instance 6d104e2f-9924-4094-823d-a78c21acfc7b was re-scheduled: A specified parameter was not correct: fileType [ 1827.364382] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1827.364382] env[67015]: DEBUG nova.compute.manager [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1827.364382] env[67015]: DEBUG nova.compute.manager [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1827.364382] env[67015]: DEBUG nova.compute.manager [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1827.364693] env[67015]: DEBUG nova.network.neutron [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1827.655313] env[67015]: DEBUG nova.network.neutron [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1827.665905] env[67015]: INFO nova.compute.manager [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Took 0.30 seconds to deallocate network for instance. 
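The traceback above ends in the image-cache step of the VMware spawn path: after fetching the image, _cache_sparse_image asks vCenter to copy the sparse VMDK via CopyVirtualDisk_Task, and vCenter rejects the request with InvalidArgument on fileType. Below is a minimal sketch, assuming placeholder vCenter credentials and datastore paths, of what that call shape looks like through oslo.vmware; it illustrates how the fault in this log surfaces out of wait_for_task, and is not Nova's actual code.

from oslo_vmware import api as vmware_api
from oslo_vmware import exceptions as vexc

# Placeholder connection details; retry/poll values are illustrative only.
session = vmware_api.VMwareAPISession(
    'vcenter.example.test', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# VirtualDiskManager is the vSphere managed object behind the disk copy
# that the _cache_sparse_image frame above drives via copy_virtual_disk().
disk_mgr = session.vim.service_content.virtualDiskManager

copy_task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName='[datastore2] devstack-image-cache_base/tmp-xyz/image.vmdk',
    destName='[datastore2] devstack-image-cache_base/image/image.vmdk')

try:
    # wait_for_task() polls task.info and re-raises vCenter task errors;
    # this is where VimFaultException (Faults: ['InvalidArgument']) surfaces.
    session.wait_for_task(copy_task)
except vexc.VimFaultException as e:
    print(e.fault_list, str(e))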
[ 1827.754151] env[67015]: INFO nova.scheduler.client.report [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Deleted allocations for instance 6d104e2f-9924-4094-823d-a78c21acfc7b [ 1827.775535] env[67015]: DEBUG oslo_concurrency.lockutils [None req-4d95e97d-1d5d-40da-9c2e-817f1a5d5dd0 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "6d104e2f-9924-4094-823d-a78c21acfc7b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 625.847s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1827.776746] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5a393502-cce0-4b47-9ae3-47c52dc9f44f tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "6d104e2f-9924-4094-823d-a78c21acfc7b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 428.668s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.776888] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5a393502-cce0-4b47-9ae3-47c52dc9f44f tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "6d104e2f-9924-4094-823d-a78c21acfc7b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.777324] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5a393502-cce0-4b47-9ae3-47c52dc9f44f tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "6d104e2f-9924-4094-823d-a78c21acfc7b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.777324] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5a393502-cce0-4b47-9ae3-47c52dc9f44f tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "6d104e2f-9924-4094-823d-a78c21acfc7b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1827.779195] env[67015]: INFO nova.compute.manager [None req-5a393502-cce0-4b47-9ae3-47c52dc9f44f tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Terminating instance [ 1827.780939] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5a393502-cce0-4b47-9ae3-47c52dc9f44f tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "refresh_cache-6d104e2f-9924-4094-823d-a78c21acfc7b" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1827.781108] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5a393502-cce0-4b47-9ae3-47c52dc9f44f tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquired lock "refresh_cache-6d104e2f-9924-4094-823d-a78c21acfc7b" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1827.781300] env[67015]: DEBUG
nova.network.neutron [None req-5a393502-cce0-4b47-9ae3-47c52dc9f44f tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1827.787412] env[67015]: DEBUG nova.compute.manager [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1827.808159] env[67015]: DEBUG nova.network.neutron [None req-5a393502-cce0-4b47-9ae3-47c52dc9f44f tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1827.857094] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.857358] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.858857] env[67015]: INFO nova.compute.claims [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1827.980814] env[67015]: DEBUG nova.network.neutron [None req-5a393502-cce0-4b47-9ae3-47c52dc9f44f tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1827.989587] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5a393502-cce0-4b47-9ae3-47c52dc9f44f tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Releasing lock "refresh_cache-6d104e2f-9924-4094-823d-a78c21acfc7b" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1827.989969] env[67015]: DEBUG nova.compute.manager [None req-5a393502-cce0-4b47-9ae3-47c52dc9f44f tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Start destroying the instance on the hypervisor. 
{{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1827.990185] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5a393502-cce0-4b47-9ae3-47c52dc9f44f tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1827.992720] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c0fda8b0-b976-4b1e-90f8-1c509d756cbe {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.001680] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b9e3dd-d5a1-4908-a026-2efb3208f061 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.032886] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-5a393502-cce0-4b47-9ae3-47c52dc9f44f tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6d104e2f-9924-4094-823d-a78c21acfc7b could not be found. [ 1828.033094] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5a393502-cce0-4b47-9ae3-47c52dc9f44f tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1828.033274] env[67015]: INFO nova.compute.manager [None req-5a393502-cce0-4b47-9ae3-47c52dc9f44f tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1828.033512] env[67015]: DEBUG oslo.service.loopingcall [None req-5a393502-cce0-4b47-9ae3-47c52dc9f44f tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1828.033724] env[67015]: DEBUG nova.compute.manager [-] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1828.033818] env[67015]: DEBUG nova.network.neutron [-] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1828.050294] env[67015]: DEBUG nova.network.neutron [-] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Instance cache missing network info.
{{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1828.058184] env[67015]: DEBUG nova.network.neutron [-] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1828.064273] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58fa2747-058c-4d9e-b6ea-9220f3324075 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.068625] env[67015]: INFO nova.compute.manager [-] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] Took 0.03 seconds to deallocate network for instance. [ 1828.075422] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f44a5059-61e9-4f39-8708-a6efb5c33f52 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.108449] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051babde-4240-48db-b293-85a51b08cd74 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.115991] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9051cbe4-b3d8-4f48-b698-16c28ba6dbdf {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.132558] env[67015]: DEBUG nova.compute.provider_tree [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1828.139830] env[67015]: DEBUG nova.scheduler.client.report [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1828.157314] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.300s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.157922] env[67015]: DEBUG nova.compute.manager [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Start building networks asynchronously for instance. 
{{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1828.172773] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5a393502-cce0-4b47-9ae3-47c52dc9f44f tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "6d104e2f-9924-4094-823d-a78c21acfc7b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.396s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.173861] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "6d104e2f-9924-4094-823d-a78c21acfc7b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 53.361s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.173861] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6d104e2f-9924-4094-823d-a78c21acfc7b] During sync_power_state the instance has a pending task (deleting). Skip. [ 1828.174044] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "6d104e2f-9924-4094-823d-a78c21acfc7b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.189467] env[67015]: DEBUG nova.compute.utils [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1828.190609] env[67015]: DEBUG nova.compute.manager [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1828.190771] env[67015]: DEBUG nova.network.neutron [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1828.197744] env[67015]: DEBUG nova.compute.manager [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Start building block device mappings for instance.
{{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1828.245286] env[67015]: DEBUG nova.policy [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '781a56d84a2f44899b24b9355ecb4712', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '118c941baba942a5a240db9d80accb55', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 1828.258022] env[67015]: DEBUG nova.compute.manager [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Start spawning the instance on the hypervisor. {{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1828.282111] env[67015]: DEBUG nova.virt.hardware [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=<?>,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-20T08:16:53Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1828.282372] env[67015]: DEBUG nova.virt.hardware [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1828.282531] env[67015]: DEBUG nova.virt.hardware [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1828.282712] env[67015]: DEBUG nova.virt.hardware [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1828.282859] env[67015]: DEBUG nova.virt.hardware [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1828.283013] env[67015]: DEBUG nova.virt.hardware [None
req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1828.283235] env[67015]: DEBUG nova.virt.hardware [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1828.283398] env[67015]: DEBUG nova.virt.hardware [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1828.283565] env[67015]: DEBUG nova.virt.hardware [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1828.283731] env[67015]: DEBUG nova.virt.hardware [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1828.283908] env[67015]: DEBUG nova.virt.hardware [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1828.284782] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673831c7-9fe2-41e9-ad76-f06308239b21 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.292518] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b105fe3-2fe5-40f1-92e2-8958603a1f7f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.699140] env[67015]: DEBUG nova.network.neutron [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Successfully created port: 0d78d40b-97c9-4b80-a731-5502ea004bd6 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1829.290493] env[67015]: DEBUG nova.network.neutron [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Successfully updated port: 0d78d40b-97c9-4b80-a731-5502ea004bd6 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1829.303800] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 
tempest-ServerMetadataTestJSON-59594459-project-member] Acquiring lock "refresh_cache-232b89d8-08a1-45af-91e6-1dc979880009" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1829.304040] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Acquired lock "refresh_cache-232b89d8-08a1-45af-91e6-1dc979880009" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1829.304322] env[67015]: DEBUG nova.network.neutron [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1829.346574] env[67015]: DEBUG nova.network.neutron [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1829.514432] env[67015]: DEBUG nova.network.neutron [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Updating instance_info_cache with network_info: [{"id": "0d78d40b-97c9-4b80-a731-5502ea004bd6", "address": "fa:16:3e:b9:93:56", "network": {"id": "351c3ac7-e5d4-4a77-9d21-df1c74c708d7", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-153895109-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "118c941baba942a5a240db9d80accb55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d78d40b-97", "ovs_interfaceid": "0d78d40b-97c9-4b80-a731-5502ea004bd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1829.551330] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Releasing lock "refresh_cache-232b89d8-08a1-45af-91e6-1dc979880009" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1829.552337] env[67015]: DEBUG nova.compute.manager [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Instance 
network_info: |[{"id": "0d78d40b-97c9-4b80-a731-5502ea004bd6", "address": "fa:16:3e:b9:93:56", "network": {"id": "351c3ac7-e5d4-4a77-9d21-df1c74c708d7", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-153895109-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "118c941baba942a5a240db9d80accb55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d78d40b-97", "ovs_interfaceid": "0d78d40b-97c9-4b80-a731-5502ea004bd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1829.552930] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:93:56', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1559ce49-7345-443f-bf02-4bfeb88356ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0d78d40b-97c9-4b80-a731-5502ea004bd6', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1829.559546] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Creating folder: Project (118c941baba942a5a240db9d80accb55). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1829.560564] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2d3adff8-904e-40d6-ad30-7ceb52fb52cd {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.571227] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Created folder: Project (118c941baba942a5a240db9d80accb55) in parent group-v623108. [ 1829.571480] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Creating folder: Instances. Parent ref: group-v623218. 
{{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1829.571728] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-78b83a67-667d-4715-9861-3cf9abf25c17 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.580228] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Created folder: Instances in parent group-v623218. [ 1829.580445] env[67015]: DEBUG oslo.service.loopingcall [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1829.580617] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1829.580797] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc909106-f67f-4496-a17c-21e1953d449d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.600023] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1829.600023] env[67015]: value = "task-3114561" [ 1829.600023] env[67015]: _type = "Task" [ 1829.600023] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.608386] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114561, 'name': CreateVM_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.692592] env[67015]: DEBUG nova.compute.manager [req-52274224-5bc1-4c3c-ae48-931c4d3cb0cd req-3f90669f-5a7e-438d-bf0e-dc2d49242fcd service nova] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Received event network-vif-plugged-0d78d40b-97c9-4b80-a731-5502ea004bd6 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1829.692912] env[67015]: DEBUG oslo_concurrency.lockutils [req-52274224-5bc1-4c3c-ae48-931c4d3cb0cd req-3f90669f-5a7e-438d-bf0e-dc2d49242fcd service nova] Acquiring lock "232b89d8-08a1-45af-91e6-1dc979880009-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.693090] env[67015]: DEBUG oslo_concurrency.lockutils [req-52274224-5bc1-4c3c-ae48-931c4d3cb0cd req-3f90669f-5a7e-438d-bf0e-dc2d49242fcd service nova] Lock "232b89d8-08a1-45af-91e6-1dc979880009-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.693295] env[67015]: DEBUG oslo_concurrency.lockutils [req-52274224-5bc1-4c3c-ae48-931c4d3cb0cd req-3f90669f-5a7e-438d-bf0e-dc2d49242fcd service nova] Lock "232b89d8-08a1-45af-91e6-1dc979880009-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.693399] env[67015]: DEBUG nova.compute.manager [req-52274224-5bc1-4c3c-ae48-931c4d3cb0cd req-3f90669f-5a7e-438d-bf0e-dc2d49242fcd service nova] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] No waiting events found dispatching network-vif-plugged-0d78d40b-97c9-4b80-a731-5502ea004bd6 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1829.693589] env[67015]: WARNING nova.compute.manager [req-52274224-5bc1-4c3c-ae48-931c4d3cb0cd req-3f90669f-5a7e-438d-bf0e-dc2d49242fcd service nova] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Received unexpected event network-vif-plugged-0d78d40b-97c9-4b80-a731-5502ea004bd6 for instance with vm_state building and task_state spawning. [ 1829.693766] env[67015]: DEBUG nova.compute.manager [req-52274224-5bc1-4c3c-ae48-931c4d3cb0cd req-3f90669f-5a7e-438d-bf0e-dc2d49242fcd service nova] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Received event network-changed-0d78d40b-97c9-4b80-a731-5502ea004bd6 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1829.693924] env[67015]: DEBUG nova.compute.manager [req-52274224-5bc1-4c3c-ae48-931c4d3cb0cd req-3f90669f-5a7e-438d-bf0e-dc2d49242fcd service nova] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Refreshing instance network info cache due to event network-changed-0d78d40b-97c9-4b80-a731-5502ea004bd6.
{{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1829.694122] env[67015]: DEBUG oslo_concurrency.lockutils [req-52274224-5bc1-4c3c-ae48-931c4d3cb0cd req-3f90669f-5a7e-438d-bf0e-dc2d49242fcd service nova] Acquiring lock "refresh_cache-232b89d8-08a1-45af-91e6-1dc979880009" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1829.694258] env[67015]: DEBUG oslo_concurrency.lockutils [req-52274224-5bc1-4c3c-ae48-931c4d3cb0cd req-3f90669f-5a7e-438d-bf0e-dc2d49242fcd service nova] Acquired lock "refresh_cache-232b89d8-08a1-45af-91e6-1dc979880009" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1829.694420] env[67015]: DEBUG nova.network.neutron [req-52274224-5bc1-4c3c-ae48-931c4d3cb0cd req-3f90669f-5a7e-438d-bf0e-dc2d49242fcd service nova] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Refreshing network info cache for port 0d78d40b-97c9-4b80-a731-5502ea004bd6 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1829.945159] env[67015]: DEBUG nova.network.neutron [req-52274224-5bc1-4c3c-ae48-931c4d3cb0cd req-3f90669f-5a7e-438d-bf0e-dc2d49242fcd service nova] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Updated VIF entry in instance network info cache for port 0d78d40b-97c9-4b80-a731-5502ea004bd6. {{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1829.945518] env[67015]: DEBUG nova.network.neutron [req-52274224-5bc1-4c3c-ae48-931c4d3cb0cd req-3f90669f-5a7e-438d-bf0e-dc2d49242fcd service nova] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Updating instance_info_cache with network_info: [{"id": "0d78d40b-97c9-4b80-a731-5502ea004bd6", "address": "fa:16:3e:b9:93:56", "network": {"id": "351c3ac7-e5d4-4a77-9d21-df1c74c708d7", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-153895109-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "118c941baba942a5a240db9d80accb55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d78d40b-97", "ovs_interfaceid": "0d78d40b-97c9-4b80-a731-5502ea004bd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1829.955765] env[67015]: DEBUG oslo_concurrency.lockutils [req-52274224-5bc1-4c3c-ae48-931c4d3cb0cd req-3f90669f-5a7e-438d-bf0e-dc2d49242fcd service nova] Releasing lock "refresh_cache-232b89d8-08a1-45af-91e6-1dc979880009" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1830.110285] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114561, 'name': CreateVM_Task, 'duration_secs': 0.265675} completed successfully. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.110468] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1830.117388] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1830.117548] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1830.117868] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1830.118117] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afeb7cd2-8d3b-4ff4-9992-21c10eb0279d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.122667] env[67015]: DEBUG oslo_vmware.api [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Waiting for the task: (returnval){ [ 1830.122667] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]520a7ccd-8622-1046-820a-3de6b44f2251" [ 1830.122667] env[67015]: _type = "Task" [ 1830.122667] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.130113] env[67015]: DEBUG oslo_vmware.api [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]520a7ccd-8622-1046-820a-3de6b44f2251, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.633354] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1830.633690] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1830.633857] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1853.514441] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1855.514725] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1856.510515] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1857.514907] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1858.514497] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1858.514793] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1858.527307] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.527600] env[67015]: DEBUG oslo_concurrency.lockutils [None 
req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.527717] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1858.527874] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1858.528970] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3763154a-f605-46d4-9c28-da5bf90b1b1f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.537261] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe6ce52-b010-48fd-ba62-bd674268468c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.551152] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cad0e15-fa07-4726-af45-6fa0116eeb51 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.557282] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d978e90-ea0e-4faf-b9fe-7df3e55938b6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.585865] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181060MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1858.586029] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.586212] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.660160] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 944728f0-7db6-4cca-a51c-7acb5998cb12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1858.660326] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 41f47735-f679-4b30-8e30-f917dcf4db42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1858.660457] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1858.660580] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 199b0508-5b88-41b4-ae08-dcdabb656686 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1858.660701] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance f9cb0b88-053a-4a6d-be59-5aa202b6a4f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1858.660820] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance f7de465c-7557-41d0-b71a-ad0872c93745 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1858.660940] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 4d61f178-b532-4ddb-958f-68723d041497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1858.661071] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6daf9c76-9471-43ec-9dd6-aaa43efc391b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1858.661191] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance c1049b71-7c9b-4772-a889-fee93a62cf05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1858.661305] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 232b89d8-08a1-45af-91e6-1dc979880009 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1858.672085] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1858.672297] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1858.672452] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1858.795164] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93ec3f24-c112-4185-b1f3-e1c235278354 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1858.802534] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f94e15c2-a534-47fb-835d-a9a3c3b22f72 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1858.833256] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cb06b02-b6d8-49ab-a948-b63517e5faae {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1858.840279] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6eea2d1-aa33-437a-bf44-5e8dde9e9d0a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1858.853011] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1858.861623] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1858.874310] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1858.874474] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.288s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1859.869737] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1861.513918] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1863.513765] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1863.514205] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}}
[ 1866.516313] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1866.516608] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}}
[ 1866.516608] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1866.536805] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1866.536965] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1866.537110] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1866.537240] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1866.537391] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1866.537538] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1866.537665] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1866.537785] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1866.537907] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1866.538035] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 1866.538164] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}}
[ 1876.244865] env[67015]: WARNING oslo_vmware.rw_handles [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1876.244865] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1876.244865] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1876.244865] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 1876.244865] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1876.244865] env[67015]: ERROR oslo_vmware.rw_handles response.begin()
[ 1876.244865] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1876.244865] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 1876.244865] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1876.244865] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 1876.244865] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1876.244865] env[67015]: ERROR oslo_vmware.rw_handles
[ 1876.245602] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/d8507beb-1eb7-47ce-9ee8-1dcaad606bcf/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1876.247554] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1876.247802] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Copying Virtual Disk [datastore2] vmware_temp/d8507beb-1eb7-47ce-9ee8-1dcaad606bcf/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/d8507beb-1eb7-47ce-9ee8-1dcaad606bcf/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1876.248142] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-56e0a619-3ec1-4fc5-b225-f0587ed8ef1e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1876.255555] env[67015]: DEBUG oslo_vmware.api [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Waiting for the task: (returnval){
[ 1876.255555] env[67015]: value = "task-3114562"
[ 1876.255555] env[67015]: _type = "Task"
[ 1876.255555] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1876.263279] env[67015]: DEBUG oslo_vmware.api [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Task: {'id': task-3114562, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1876.765892] env[67015]: DEBUG oslo_vmware.exceptions [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Fault InvalidArgument not matched. {{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1876.766191] env[67015]: DEBUG oslo_concurrency.lockutils [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1876.766750] env[67015]: ERROR nova.compute.manager [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1876.766750] env[67015]: Faults: ['InvalidArgument']
[ 1876.766750] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Traceback (most recent call last):
[ 1876.766750] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1876.766750] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] yield resources
[ 1876.766750] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1876.766750] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] self.driver.spawn(context, instance, image_meta,
[ 1876.766750] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1876.766750] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1876.766750] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1876.766750] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] self._fetch_image_if_missing(context, vi)
[ 1876.766750] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1876.767200] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] image_cache(vi, tmp_image_ds_loc)
[ 1876.767200] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1876.767200] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] vm_util.copy_virtual_disk(
[ 1876.767200] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1876.767200] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] session._wait_for_task(vmdk_copy_task)
[ 1876.767200] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1876.767200] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] return self.wait_for_task(task_ref)
[ 1876.767200] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1876.767200] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] return evt.wait()
[ 1876.767200] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1876.767200] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] result = hub.switch()
[ 1876.767200] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1876.767200] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] return self.greenlet.switch()
[ 1876.767573] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1876.767573] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] self.f(*self.args, **self.kw)
[ 1876.767573] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1876.767573] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] raise exceptions.translate_fault(task_info.error)
[ 1876.767573] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1876.767573] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Faults: ['InvalidArgument']
[ 1876.767573] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12]
[ 1876.767573] env[67015]: INFO nova.compute.manager [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Terminating instance
[ 1876.768718] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1876.768930] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1876.769186] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fada4bf5-a80c-4d5b-9ae6-07ec1a338808 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1876.771276] env[67015]: DEBUG nova.compute.manager [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1876.771469] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1876.772187] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3fd2720-106f-4d9a-b104-4d31b477cc90 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1876.779475] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1876.780346] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2e802ecb-b742-457f-b832-d40ded0f4452 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1876.781677] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1876.781849] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1876.782493] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0bb02c7-33cb-46ea-a382-032d4d3033d6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1876.787073] env[67015]: DEBUG oslo_vmware.api [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Waiting for the task: (returnval){
[ 1876.787073] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]5222169d-b863-4c1d-06f0-bf9cba019ced"
[ 1876.787073] env[67015]: _type = "Task"
[ 1876.787073] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1876.794067] env[67015]: DEBUG oslo_vmware.api [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]5222169d-b863-4c1d-06f0-bf9cba019ced, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1876.850817] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1876.851051] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1876.851236] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Deleting the datastore file [datastore2] 944728f0-7db6-4cca-a51c-7acb5998cb12 {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1876.851490] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d3cb04cb-569f-4ed6-b65d-e76eae3360b4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1876.858231] env[67015]: DEBUG oslo_vmware.api [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Waiting for the task: (returnval){
[ 1876.858231] env[67015]: value = "task-3114564"
[ 1876.858231] env[67015]: _type = "Task"
[ 1876.858231] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1876.865595] env[67015]: DEBUG oslo_vmware.api [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Task: {'id': task-3114564, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1877.297434] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1877.297783] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Creating directory with path [datastore2] vmware_temp/31b15744-f5fa-4d06-8757-a8887ec5de07/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1877.297908] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69c227dc-0a74-4a16-8873-7d194756e820 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1877.313742] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Created directory with path [datastore2] vmware_temp/31b15744-f5fa-4d06-8757-a8887ec5de07/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1877.313942] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Fetch image to [datastore2] vmware_temp/31b15744-f5fa-4d06-8757-a8887ec5de07/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1877.314128] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/31b15744-f5fa-4d06-8757-a8887ec5de07/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1877.315146] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdeb398e-a555-45ec-a21f-52ab5913d3f7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1877.321188] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb261329-0fc4-4ef7-a127-6284d94b594b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1877.329960] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-123d928c-06e4-4f1e-aa63-94e96d695f46 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1877.370269] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97df88a9-5f59-48f0-b76e-7c2703fc50bb {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1877.378259] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5e061ad7-37ce-4ee0-85cf-58ffde763ced {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1877.379884] env[67015]: DEBUG oslo_vmware.api [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Task: {'id': task-3114564, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099096} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1877.380143] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1877.380326] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1877.380496] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1877.380669] env[67015]: INFO nova.compute.manager [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Took 0.61 seconds to destroy the instance on the hypervisor.
[ 1877.382810] env[67015]: DEBUG nova.compute.claims [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1877.382998] env[67015]: DEBUG oslo_concurrency.lockutils [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1877.383245] env[67015]: DEBUG oslo_concurrency.lockutils [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1877.403092] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1877.487134] env[67015]: DEBUG oslo_vmware.rw_handles [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/31b15744-f5fa-4d06-8757-a8887ec5de07/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1877.551420] env[67015]: DEBUG oslo_vmware.rw_handles [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1877.551630] env[67015]: DEBUG oslo_vmware.rw_handles [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/31b15744-f5fa-4d06-8757-a8887ec5de07/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1877.627298] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76558ea4-bab2-43b0-8120-9fa2acc198ce {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1877.635751] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3cdd45b-dec0-4f0d-b66d-9980d0ab6569 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1877.664429] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f709b8-2e31-428d-a02c-7a80ccb192df {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1877.670826] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72619e27-f7c2-4d98-a024-723163f46638 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1877.684144] env[67015]: DEBUG nova.compute.provider_tree [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1877.692264] env[67015]: DEBUG nova.scheduler.client.report [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1877.705537] env[67015]: DEBUG oslo_concurrency.lockutils [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.322s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1877.706085] env[67015]: ERROR nova.compute.manager [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1877.706085] env[67015]: Faults: ['InvalidArgument']
[ 1877.706085] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Traceback (most recent call last):
[ 1877.706085] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1877.706085] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] self.driver.spawn(context, instance, image_meta,
[ 1877.706085] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1877.706085] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1877.706085] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1877.706085] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] self._fetch_image_if_missing(context, vi)
[ 1877.706085] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1877.706085] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] image_cache(vi, tmp_image_ds_loc)
[ 1877.706085] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1877.706460] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] vm_util.copy_virtual_disk(
[ 1877.706460] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1877.706460] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] session._wait_for_task(vmdk_copy_task)
[ 1877.706460] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1877.706460] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] return self.wait_for_task(task_ref)
[ 1877.706460] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1877.706460] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] return evt.wait()
[ 1877.706460] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1877.706460] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] result = hub.switch()
[ 1877.706460] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1877.706460] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] return self.greenlet.switch()
[ 1877.706460] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1877.706460] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] self.f(*self.args, **self.kw)
[ 1877.707156] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1877.707156] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] raise exceptions.translate_fault(task_info.error)
[ 1877.707156] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1877.707156] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Faults: ['InvalidArgument']
[ 1877.707156] env[67015]: ERROR nova.compute.manager [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12]
[ 1877.707156] env[67015]: DEBUG nova.compute.utils [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1877.708125] env[67015]: DEBUG nova.compute.manager [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Build of instance 944728f0-7db6-4cca-a51c-7acb5998cb12 was re-scheduled: A specified parameter was not correct: fileType
[ 1877.708125] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1877.708488] env[67015]: DEBUG nova.compute.manager [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1877.708658] env[67015]: DEBUG nova.compute.manager [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 1877.708828] env[67015]: DEBUG nova.compute.manager [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1877.708999] env[67015]: DEBUG nova.network.neutron [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1878.026698] env[67015]: DEBUG nova.network.neutron [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1878.049573] env[67015]: INFO nova.compute.manager [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Took 0.34 seconds to deallocate network for instance.
[ 1878.175457] env[67015]: INFO nova.scheduler.client.report [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Deleted allocations for instance 944728f0-7db6-4cca-a51c-7acb5998cb12
[ 1878.202182] env[67015]: DEBUG oslo_concurrency.lockutils [None req-944aba50-aee0-4c27-b069-82c71e0a1d40 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Lock "944728f0-7db6-4cca-a51c-7acb5998cb12" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 603.452s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1878.203497] env[67015]: DEBUG oslo_concurrency.lockutils [None req-cf4d5891-6888-4ae6-bbdc-afec29923919 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Lock "944728f0-7db6-4cca-a51c-7acb5998cb12" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 406.467s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1878.203620] env[67015]: DEBUG oslo_concurrency.lockutils [None req-cf4d5891-6888-4ae6-bbdc-afec29923919 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Acquiring lock "944728f0-7db6-4cca-a51c-7acb5998cb12-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1878.203830] env[67015]: DEBUG oslo_concurrency.lockutils [None req-cf4d5891-6888-4ae6-bbdc-afec29923919 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Lock "944728f0-7db6-4cca-a51c-7acb5998cb12-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1878.204043] env[67015]: DEBUG oslo_concurrency.lockutils [None req-cf4d5891-6888-4ae6-bbdc-afec29923919 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Lock "944728f0-7db6-4cca-a51c-7acb5998cb12-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1878.207634] env[67015]: INFO nova.compute.manager [None req-cf4d5891-6888-4ae6-bbdc-afec29923919 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Terminating instance
[ 1878.211857] env[67015]: DEBUG nova.compute.manager [None req-cf4d5891-6888-4ae6-bbdc-afec29923919 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1878.212078] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-cf4d5891-6888-4ae6-bbdc-afec29923919 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1878.212372] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-068fe8cc-962c-402e-a2be-07c2cf9e12e2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1878.222572] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab4e2115-f103-4f74-bda0-ee9a0098c0f8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1878.234212] env[67015]: DEBUG nova.compute.manager [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1878.255454] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-cf4d5891-6888-4ae6-bbdc-afec29923919 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 944728f0-7db6-4cca-a51c-7acb5998cb12 could not be found.
[ 1878.255662] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-cf4d5891-6888-4ae6-bbdc-afec29923919 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1878.255841] env[67015]: INFO nova.compute.manager [None req-cf4d5891-6888-4ae6-bbdc-afec29923919 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 1878.256140] env[67015]: DEBUG oslo.service.loopingcall [None req-cf4d5891-6888-4ae6-bbdc-afec29923919 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1878.256390] env[67015]: DEBUG nova.compute.manager [-] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1878.256491] env[67015]: DEBUG nova.network.neutron [-] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1878.282417] env[67015]: DEBUG nova.network.neutron [-] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1878.287721] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1878.287957] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1878.289722] env[67015]: INFO nova.compute.claims [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1878.292585] env[67015]: INFO nova.compute.manager [-] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] Took 0.04 seconds to deallocate network for instance.
[ 1878.411275] env[67015]: DEBUG oslo_concurrency.lockutils [None req-cf4d5891-6888-4ae6-bbdc-afec29923919 tempest-MultipleCreateTestJSON-340990970 tempest-MultipleCreateTestJSON-340990970-project-member] Lock "944728f0-7db6-4cca-a51c-7acb5998cb12" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.208s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1878.412229] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "944728f0-7db6-4cca-a51c-7acb5998cb12" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 103.599s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1878.412444] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 944728f0-7db6-4cca-a51c-7acb5998cb12] During sync_power_state the instance has a pending task (deleting). Skip.
[ 1878.412942] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "944728f0-7db6-4cca-a51c-7acb5998cb12" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1878.469129] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df37ce0-bab2-4c02-abc2-d0ca04821af2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.476997] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc5a482-9653-457a-ab12-80ac41605f07 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.506817] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63e5ffd-2031-49e7-8cbf-a5cb4f644812 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.513394] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a4045d-0a32-4697-b453-d51cedc8e415 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.525830] env[67015]: DEBUG nova.compute.provider_tree [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1878.533703] env[67015]: DEBUG nova.scheduler.client.report [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1878.551621] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.264s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1878.552118] env[67015]: DEBUG nova.compute.manager [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Start building networks asynchronously for instance. 
{{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1878.589222] env[67015]: DEBUG nova.compute.utils [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1878.590648] env[67015]: DEBUG nova.compute.manager [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1878.590775] env[67015]: DEBUG nova.network.neutron [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1878.599703] env[67015]: DEBUG nova.compute.manager [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1878.666867] env[67015]: DEBUG nova.policy [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fe4a65d0e9ce492bad143896e47ed3be', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8c0714888b7f42978f7b641b843d6b24', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 1878.674352] env[67015]: DEBUG nova.compute.manager [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Start spawning the instance on the hypervisor. 
{{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1878.712506] env[67015]: DEBUG nova.virt.hardware [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=<?>,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-20T08:16:53Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1878.713010] env[67015]: DEBUG nova.virt.hardware [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1878.713195] env[67015]: DEBUG nova.virt.hardware [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1878.713387] env[67015]: DEBUG nova.virt.hardware [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1878.713542] env[67015]: DEBUG nova.virt.hardware [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1878.713694] env[67015]: DEBUG nova.virt.hardware [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1878.713981] env[67015]: DEBUG nova.virt.hardware [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1878.714178] env[67015]: DEBUG nova.virt.hardware [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1878.714358] env[67015]: DEBUG 
nova.virt.hardware [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1878.714526] env[67015]: DEBUG nova.virt.hardware [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1878.714702] env[67015]: DEBUG nova.virt.hardware [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1878.715574] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-597a9ce9-f4bc-4ff6-98aa-df040744d4ce {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.723592] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ac58356-683f-4a49-90c4-49baadb241b9 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.030022] env[67015]: DEBUG nova.network.neutron [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Successfully created port: b0979987-5b8a-44fd-aea8-27b04b6b4840 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1879.669486] env[67015]: DEBUG nova.network.neutron [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Successfully updated port: b0979987-5b8a-44fd-aea8-27b04b6b4840 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1879.685567] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Acquiring lock "refresh_cache-8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1879.685814] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Acquired lock "refresh_cache-8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1879.685905] env[67015]: DEBUG nova.network.neutron [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1879.734233] env[67015]: DEBUG nova.network.neutron [None 
req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1879.906872] env[67015]: DEBUG nova.network.neutron [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Updating instance_info_cache with network_info: [{"id": "b0979987-5b8a-44fd-aea8-27b04b6b4840", "address": "fa:16:3e:ec:b7:94", "network": {"id": "08af4d1c-d0ca-4cdf-b924-be4348400e4c", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1444146998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c0714888b7f42978f7b641b843d6b24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0979987-5b", "ovs_interfaceid": "b0979987-5b8a-44fd-aea8-27b04b6b4840", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1879.924411] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Releasing lock "refresh_cache-8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1879.924704] env[67015]: DEBUG nova.compute.manager [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Instance network_info: |[{"id": "b0979987-5b8a-44fd-aea8-27b04b6b4840", "address": "fa:16:3e:ec:b7:94", "network": {"id": "08af4d1c-d0ca-4cdf-b924-be4348400e4c", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1444146998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c0714888b7f42978f7b641b843d6b24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0979987-5b", "ovs_interfaceid": 
"b0979987-5b8a-44fd-aea8-27b04b6b4840", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1879.925143] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:b7:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d62c1cf-f39a-4626-9552-f1e13c692636', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b0979987-5b8a-44fd-aea8-27b04b6b4840', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1879.932664] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Creating folder: Project (8c0714888b7f42978f7b641b843d6b24). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1879.933222] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-29a31b29-e7ab-42ba-850d-ee9e801625f8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.943798] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Created folder: Project (8c0714888b7f42978f7b641b843d6b24) in parent group-v623108. [ 1879.943980] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Creating folder: Instances. Parent ref: group-v623221. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1879.944212] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-40f0f8a9-a7f1-4973-9d08-54b5e7701a29 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.953877] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Created folder: Instances in parent group-v623221. [ 1879.954105] env[67015]: DEBUG oslo.service.loopingcall [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1879.954278] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1879.954455] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f676520-d05c-4078-a031-daaa3d405e62 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.972441] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1879.972441] env[67015]: value = "task-3114567" [ 1879.972441] env[67015]: _type = "Task" [ 1879.972441] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.979234] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114567, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.232329] env[67015]: DEBUG nova.compute.manager [req-0479a883-6c46-478a-b197-35870b2675a6 req-874bf29f-f973-415a-9d60-b9384b9250e9 service nova] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Received event network-vif-plugged-b0979987-5b8a-44fd-aea8-27b04b6b4840 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1880.232551] env[67015]: DEBUG oslo_concurrency.lockutils [req-0479a883-6c46-478a-b197-35870b2675a6 req-874bf29f-f973-415a-9d60-b9384b9250e9 service nova] Acquiring lock "8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1880.232776] env[67015]: DEBUG oslo_concurrency.lockutils [req-0479a883-6c46-478a-b197-35870b2675a6 req-874bf29f-f973-415a-9d60-b9384b9250e9 service nova] Lock "8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1880.232949] env[67015]: DEBUG oslo_concurrency.lockutils [req-0479a883-6c46-478a-b197-35870b2675a6 req-874bf29f-f973-415a-9d60-b9384b9250e9 service nova] Lock "8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1880.233125] env[67015]: DEBUG nova.compute.manager [req-0479a883-6c46-478a-b197-35870b2675a6 req-874bf29f-f973-415a-9d60-b9384b9250e9 service nova] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] No waiting events found dispatching network-vif-plugged-b0979987-5b8a-44fd-aea8-27b04b6b4840 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1880.233312] env[67015]: WARNING nova.compute.manager [req-0479a883-6c46-478a-b197-35870b2675a6 req-874bf29f-f973-415a-9d60-b9384b9250e9 service nova] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Received unexpected event network-vif-plugged-b0979987-5b8a-44fd-aea8-27b04b6b4840 for instance with vm_state building and task_state spawning. 
[ 1880.233472] env[67015]: DEBUG nova.compute.manager [req-0479a883-6c46-478a-b197-35870b2675a6 req-874bf29f-f973-415a-9d60-b9384b9250e9 service nova] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Received event network-changed-b0979987-5b8a-44fd-aea8-27b04b6b4840 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1880.233648] env[67015]: DEBUG nova.compute.manager [req-0479a883-6c46-478a-b197-35870b2675a6 req-874bf29f-f973-415a-9d60-b9384b9250e9 service nova] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Refreshing instance network info cache due to event network-changed-b0979987-5b8a-44fd-aea8-27b04b6b4840. {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1880.233832] env[67015]: DEBUG oslo_concurrency.lockutils [req-0479a883-6c46-478a-b197-35870b2675a6 req-874bf29f-f973-415a-9d60-b9384b9250e9 service nova] Acquiring lock "refresh_cache-8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1880.233995] env[67015]: DEBUG oslo_concurrency.lockutils [req-0479a883-6c46-478a-b197-35870b2675a6 req-874bf29f-f973-415a-9d60-b9384b9250e9 service nova] Acquired lock "refresh_cache-8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1880.234182] env[67015]: DEBUG nova.network.neutron [req-0479a883-6c46-478a-b197-35870b2675a6 req-874bf29f-f973-415a-9d60-b9384b9250e9 service nova] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Refreshing network info cache for port b0979987-5b8a-44fd-aea8-27b04b6b4840 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1880.483008] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114567, 'name': CreateVM_Task} progress is 99%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.505844] env[67015]: DEBUG nova.network.neutron [req-0479a883-6c46-478a-b197-35870b2675a6 req-874bf29f-f973-415a-9d60-b9384b9250e9 service nova] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Updated VIF entry in instance network info cache for port b0979987-5b8a-44fd-aea8-27b04b6b4840. 
{{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1880.506228] env[67015]: DEBUG nova.network.neutron [req-0479a883-6c46-478a-b197-35870b2675a6 req-874bf29f-f973-415a-9d60-b9384b9250e9 service nova] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Updating instance_info_cache with network_info: [{"id": "b0979987-5b8a-44fd-aea8-27b04b6b4840", "address": "fa:16:3e:ec:b7:94", "network": {"id": "08af4d1c-d0ca-4cdf-b924-be4348400e4c", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1444146998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c0714888b7f42978f7b641b843d6b24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0979987-5b", "ovs_interfaceid": "b0979987-5b8a-44fd-aea8-27b04b6b4840", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1880.516440] env[67015]: DEBUG oslo_concurrency.lockutils [req-0479a883-6c46-478a-b197-35870b2675a6 req-874bf29f-f973-415a-9d60-b9384b9250e9 service nova] Releasing lock "refresh_cache-8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1880.985538] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114567, 'name': CreateVM_Task, 'duration_secs': 0.663993} completed successfully. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.985538] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1880.985538] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1880.985538] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1880.985538] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1880.985968] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22e27a78-7b12-4919-9e05-966fc2eed96d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.990317] env[67015]: DEBUG oslo_vmware.api [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Waiting for the task: (returnval){ [ 1880.990317] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]5233a29c-b3c8-dee2-1577-f7ba03b27370" [ 1880.990317] env[67015]: _type = "Task" [ 1880.990317] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.997902] env[67015]: DEBUG oslo_vmware.api [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]5233a29c-b3c8-dee2-1577-f7ba03b27370, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.501193] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1881.501478] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1881.501701] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1900.769731] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0839856c-6f46-40a4-9a14-cb19ed290274 tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Acquiring lock "c1049b71-7c9b-4772-a889-fee93a62cf05" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1914.514722] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1916.516405] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1917.514802] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1918.509666] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1918.513428] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1918.525359] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1918.525627] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.525834] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.526035] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1918.527156] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d8e3085-b8e3-4301-a369-38d5181ee93f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.535765] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73441256-3d47-4f74-b020-49448af19e9d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.549062] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b81dddd-0e0c-4faa-872f-b8263a700b1a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.554933] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d5eba74-1cb4-4eab-ad01-d66d1552f568 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.584342] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181065MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1918.584491] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1918.584686] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.654115] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 41f47735-f679-4b30-8e30-f917dcf4db42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 
'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1918.654285] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1918.654411] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 199b0508-5b88-41b4-ae08-dcdabb656686 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1918.654532] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance f9cb0b88-053a-4a6d-be59-5aa202b6a4f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1918.654649] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance f7de465c-7557-41d0-b71a-ad0872c93745 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1918.654768] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 4d61f178-b532-4ddb-958f-68723d041497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1918.654887] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6daf9c76-9471-43ec-9dd6-aaa43efc391b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1918.655017] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance c1049b71-7c9b-4772-a889-fee93a62cf05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1918.655136] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 232b89d8-08a1-45af-91e6-1dc979880009 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1918.655250] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1918.655437] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1918.655574] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1918.775479] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a8cdae2-82b8-407b-b4e7-02190381bc0f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.782788] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1853649-5a53-4ab5-9e2c-bb55e8c3ccc5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.810965] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67456562-0163-4b59-8142-8ac2a6ee2a6a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.817527] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-683090d3-f76f-44fe-bd27-f506d4fd1338 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.831656] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1918.839568] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1918.852423] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 
1918.852600] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.268s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.853619] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1923.514606] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1923.514946] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1923.514990] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1926.262018] env[67015]: WARNING oslo_vmware.rw_handles [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1926.262018] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1926.262018] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1926.262018] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1926.262018] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1926.262018] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 1926.262018] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1926.262018] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1926.262018] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1926.262018] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1926.262018] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1926.262018] env[67015]: ERROR oslo_vmware.rw_handles [ 1926.262773] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/31b15744-f5fa-4d06-8757-a8887ec5de07/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 
1926.264754] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1926.264976] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Copying Virtual Disk [datastore2] vmware_temp/31b15744-f5fa-4d06-8757-a8887ec5de07/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/31b15744-f5fa-4d06-8757-a8887ec5de07/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1926.265313] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97ff9b67-5f94-4da1-ae4d-1ae9147a6f32 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.274124] env[67015]: DEBUG oslo_vmware.api [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Waiting for the task: (returnval){ [ 1926.274124] env[67015]: value = "task-3114568" [ 1926.274124] env[67015]: _type = "Task" [ 1926.274124] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.282019] env[67015]: DEBUG oslo_vmware.api [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Task: {'id': task-3114568, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.784285] env[67015]: DEBUG oslo_vmware.exceptions [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Fault InvalidArgument not matched. 
{{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1926.784546] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1926.785099] env[67015]: ERROR nova.compute.manager [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1926.785099] env[67015]: Faults: ['InvalidArgument'] [ 1926.785099] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Traceback (most recent call last): [ 1926.785099] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1926.785099] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] yield resources [ 1926.785099] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1926.785099] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] self.driver.spawn(context, instance, image_meta, [ 1926.785099] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1926.785099] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1926.785099] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1926.785099] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] self._fetch_image_if_missing(context, vi) [ 1926.785099] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1926.785566] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] image_cache(vi, tmp_image_ds_loc) [ 1926.785566] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1926.785566] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] vm_util.copy_virtual_disk( [ 1926.785566] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1926.785566] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] session._wait_for_task(vmdk_copy_task) [ 1926.785566] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1926.785566] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] return self.wait_for_task(task_ref) [ 1926.785566] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1926.785566] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] return evt.wait() [ 1926.785566] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1926.785566] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] result = hub.switch() [ 1926.785566] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1926.785566] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] return self.greenlet.switch() [ 1926.786019] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1926.786019] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] self.f(*self.args, **self.kw) [ 1926.786019] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1926.786019] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] raise exceptions.translate_fault(task_info.error) [ 1926.786019] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1926.786019] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Faults: ['InvalidArgument'] [ 1926.786019] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] [ 1926.786019] env[67015]: INFO nova.compute.manager [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Terminating instance [ 1926.786949] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1926.787170] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1926.787399] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-a7f1be49-7ef3-41bd-8203-23e8e160d748 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.789444] env[67015]: DEBUG nova.compute.manager [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1926.789636] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1926.790338] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e262a021-b537-491c-a796-4631bdc98809 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.797432] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1926.798490] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2a3568cb-b283-4eda-9d32-509976f165ac {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.799948] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1926.800148] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1926.800858] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d396c0b5-100a-4598-9204-14860d6d912f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.806182] env[67015]: DEBUG oslo_vmware.api [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Waiting for the task: (returnval){ [ 1926.806182] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]526054ff-97ed-b5b2-8592-096e39b48cf8" [ 1926.806182] env[67015]: _type = "Task" [ 1926.806182] env[67015]: } to complete. 
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.814366] env[67015]: DEBUG oslo_vmware.api [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]526054ff-97ed-b5b2-8592-096e39b48cf8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.876611] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1926.876841] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1926.876948] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Deleting the datastore file [datastore2] 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1926.877235] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-748ab6df-3adb-4ca4-a9b7-99386b2a3510 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.883789] env[67015]: DEBUG oslo_vmware.api [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Waiting for the task: (returnval){ [ 1926.883789] env[67015]: value = "task-3114570" [ 1926.883789] env[67015]: _type = "Task" [ 1926.883789] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.890981] env[67015]: DEBUG oslo_vmware.api [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Task: {'id': task-3114570, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.316421] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1927.316737] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Creating directory with path [datastore2] vmware_temp/fc236569-183f-4775-b912-77e48e21fa05/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1927.316896] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a5ea50de-cb86-46d0-ae93-8d7dfe6f87cc {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.327389] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Created directory with path [datastore2] vmware_temp/fc236569-183f-4775-b912-77e48e21fa05/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1927.327569] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Fetch image to [datastore2] vmware_temp/fc236569-183f-4775-b912-77e48e21fa05/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1927.327771] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/fc236569-183f-4775-b912-77e48e21fa05/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1927.328461] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ac8df8-9a74-40d4-abf0-1ba96f9323c5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.334737] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6928a5b-4d85-49a1-abcd-39fe66b0c853 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.343208] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acd7762c-7618-4b22-ab69-794b8bd51d91 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.374032] env[67015]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5377e87-6d2f-445f-8392-8e78e334910e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.379158] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f23c7287-65df-4678-bb4d-993cef8baf57 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.391656] env[67015]: DEBUG oslo_vmware.api [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Task: {'id': task-3114570, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077769} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.391877] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1927.392094] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1927.392282] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1927.392461] env[67015]: INFO nova.compute.manager [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Took 0.60 seconds to destroy the instance on the hypervisor. 
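The DeleteDatastoreFile_Task records above show oslo.vmware's generic task-polling shape: wait_for_task blocks on an event while _poll_task periodically re-reads the task state, logs "progress is N%" while the task is queued or running, returns once it reaches 'success', and raises a translated fault on 'error' (the raise at api.py:448 is visible in the traceback further down). A minimal Python sketch of that loop, assuming a hypothetical get_task_info() callable in place of the real vSphere property read; the polling interval and dict keys here are illustrative, not the library's actual internals:

import time

class TaskFault(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException (illustrative)."""

def wait_for_task(get_task_info, interval=0.5):
    # Poll until the task leaves the queued/running states, mirroring the
    # _poll_task / "completed successfully" records in this log.
    while True:
        info = get_task_info()  # one server round-trip per iteration
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            # analogous to raise exceptions.translate_fault(task_info.error)
            raise TaskFault(info.get("error"))
        print("Task %s progress is %s%%" % (info.get("id"), info.get("progress", 0)))
        time.sleep(interval)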
[ 1927.394566] env[67015]: DEBUG nova.compute.claims [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1927.394696] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1927.394903] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1927.400509] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1927.520756] env[67015]: DEBUG oslo_vmware.rw_handles [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fc236569-183f-4775-b912-77e48e21fa05/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1927.581870] env[67015]: DEBUG oslo_vmware.rw_handles [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1927.582105] env[67015]: DEBUG oslo_vmware.rw_handles [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fc236569-183f-4775-b912-77e48e21fa05/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1927.614821] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-807bf032-fac0-4f8a-acd5-c015c560b436 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.621847] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e507604e-8f34-4f83-b6b2-44fd47282833 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.650644] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49313bbb-90ef-4ebc-8fcd-00dd379912fe {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.656887] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19d5287e-5528-4a1d-b550-8849867ed51f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.669951] env[67015]: DEBUG nova.compute.provider_tree [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1927.679699] env[67015]: DEBUG nova.scheduler.client.report [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1927.692937] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.298s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1927.693524] env[67015]: ERROR nova.compute.manager [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1927.693524] env[67015]: Faults: ['InvalidArgument'] [ 1927.693524] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Traceback (most recent call last): [ 1927.693524] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1927.693524] 
env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] self.driver.spawn(context, instance, image_meta, [ 1927.693524] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1927.693524] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1927.693524] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1927.693524] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] self._fetch_image_if_missing(context, vi) [ 1927.693524] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1927.693524] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] image_cache(vi, tmp_image_ds_loc) [ 1927.693524] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1927.693948] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] vm_util.copy_virtual_disk( [ 1927.693948] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1927.693948] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] session._wait_for_task(vmdk_copy_task) [ 1927.693948] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1927.693948] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] return self.wait_for_task(task_ref) [ 1927.693948] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1927.693948] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] return evt.wait() [ 1927.693948] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1927.693948] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] result = hub.switch() [ 1927.693948] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1927.693948] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] return self.greenlet.switch() [ 1927.693948] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1927.693948] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] self.f(*self.args, **self.kw) [ 1927.694399] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1927.694399] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] raise exceptions.translate_fault(task_info.error) [ 1927.694399] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1927.694399] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Faults: ['InvalidArgument'] [ 1927.694399] env[67015]: ERROR nova.compute.manager [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] [ 1927.694399] env[67015]: DEBUG nova.compute.utils [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1927.695544] env[67015]: DEBUG nova.compute.manager [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Build of instance 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d was re-scheduled: A specified parameter was not correct: fileType [ 1927.695544] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1927.695908] env[67015]: DEBUG nova.compute.manager [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1927.696103] env[67015]: DEBUG nova.compute.manager [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1927.696277] env[67015]: DEBUG nova.compute.manager [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1927.696441] env[67015]: DEBUG nova.network.neutron [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1928.005525] env[67015]: DEBUG nova.network.neutron [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1928.017814] env[67015]: INFO nova.compute.manager [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Took 0.32 seconds to deallocate network for instance. [ 1928.113348] env[67015]: INFO nova.scheduler.client.report [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Deleted allocations for instance 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d [ 1928.137108] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22789515-dc93-48bd-bb2c-ca8f8291f32c tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Lock "8f61fa9e-82b6-401a-a7e0-fe4a54561f8d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 566.547s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1928.137396] env[67015]: DEBUG oslo_concurrency.lockutils [None req-95e089ce-e3a4-43e7-8205-d05c4a562acb tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Lock "8f61fa9e-82b6-401a-a7e0-fe4a54561f8d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 370.800s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1928.137618] env[67015]: DEBUG oslo_concurrency.lockutils [None req-95e089ce-e3a4-43e7-8205-d05c4a562acb tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Acquiring lock "8f61fa9e-82b6-401a-a7e0-fe4a54561f8d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1928.137823] env[67015]: DEBUG oslo_concurrency.lockutils [None req-95e089ce-e3a4-43e7-8205-d05c4a562acb tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Lock "8f61fa9e-82b6-401a-a7e0-fe4a54561f8d-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1928.137991] env[67015]: DEBUG oslo_concurrency.lockutils [None req-95e089ce-e3a4-43e7-8205-d05c4a562acb tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Lock "8f61fa9e-82b6-401a-a7e0-fe4a54561f8d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1928.140152] env[67015]: INFO nova.compute.manager [None req-95e089ce-e3a4-43e7-8205-d05c4a562acb tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Terminating instance [ 1928.141856] env[67015]: DEBUG nova.compute.manager [None req-95e089ce-e3a4-43e7-8205-d05c4a562acb tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1928.142133] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-95e089ce-e3a4-43e7-8205-d05c4a562acb tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1928.142718] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4cdc50c3-bffa-4261-93da-cf623b150fd4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.152248] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a7fc61-4c92-4987-a198-d7ba7605421c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.180335] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-95e089ce-e3a4-43e7-8205-d05c4a562acb tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d could not be found. [ 1928.180530] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-95e089ce-e3a4-43e7-8205-d05c4a562acb tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1928.180738] env[67015]: INFO nova.compute.manager [None req-95e089ce-e3a4-43e7-8205-d05c4a562acb tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1928.180972] env[67015]: DEBUG oslo.service.loopingcall [None req-95e089ce-e3a4-43e7-8205-d05c4a562acb tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1928.181211] env[67015]: DEBUG nova.compute.manager [-] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1928.181306] env[67015]: DEBUG nova.network.neutron [-] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1928.207166] env[67015]: DEBUG nova.network.neutron [-] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1928.215535] env[67015]: INFO nova.compute.manager [-] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] Took 0.03 seconds to deallocate network for instance. [ 1928.299182] env[67015]: DEBUG oslo_concurrency.lockutils [None req-95e089ce-e3a4-43e7-8205-d05c4a562acb tempest-ServersTestFqdnHostnames-703214732 tempest-ServersTestFqdnHostnames-703214732-project-member] Lock "8f61fa9e-82b6-401a-a7e0-fe4a54561f8d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.162s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1928.300066] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "8f61fa9e-82b6-401a-a7e0-fe4a54561f8d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 153.487s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1928.300261] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 8f61fa9e-82b6-401a-a7e0-fe4a54561f8d] During sync_power_state the instance has a pending task (deleting). Skip. 
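The oslo.service.loopingcall record above ('Waiting for function ... _deallocate_network_with_retries to return') wraps network deallocation in a retry loop so a transient Neutron error does not strand the allocation. A generic fixed-interval retry sketch in the same spirit; max_retries and interval are invented defaults, and the real mechanism is oslo.service's looping-call machinery rather than this function:

import time

def call_with_retries(func, max_retries=3, interval=1.0):
    # Re-invoke func until it succeeds or the retry budget is spent,
    # sleeping a fixed interval between attempts.
    attempt = 0
    while True:
        try:
            return func()
        except Exception:
            attempt += 1
            if attempt > max_retries:
                raise
            time.sleep(interval)

For example, call_with_retries(lambda: deallocate_network(context, instance)) retries the hypothetical deallocate_network a few times before letting the exception propagate.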
[ 1928.300433] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "8f61fa9e-82b6-401a-a7e0-fe4a54561f8d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1928.514347] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1928.514714] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1928.514714] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1928.533827] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1928.534025] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1928.534123] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1928.534279] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1928.534413] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1928.534540] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1928.534664] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Skipping network cache update for instance because it is Building. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1928.534785] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1928.534905] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1928.535102] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1953.945939] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Acquiring lock "dea0f558-4d4b-41f4-9df9-c997835a628c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1953.946245] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Lock "dea0f558-4d4b-41f4-9df9-c997835a628c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1953.956561] env[67015]: DEBUG nova.compute.manager [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Starting instance... 
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1954.005982] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1954.006262] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1954.007736] env[67015]: INFO nova.compute.claims [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1954.178881] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-509f9945-195c-46d8-8646-f9aee5139cfb {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.186952] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e1c7e6-a145-45ea-a84f-d3be519fbf70 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.218581] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec18846a-4d65-4519-8aa5-471ee4133491 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.226110] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5068fbfc-46e7-4b42-80a6-11d5fcf2ed06 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.242581] env[67015]: DEBUG nova.compute.provider_tree [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1954.251768] env[67015]: DEBUG nova.scheduler.client.report [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1954.267545] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.261s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1954.268207] env[67015]: DEBUG nova.compute.manager [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1954.303125] env[67015]: DEBUG nova.compute.utils [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1954.304489] env[67015]: DEBUG nova.compute.manager [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1954.304666] env[67015]: DEBUG nova.network.neutron [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1954.316521] env[67015]: DEBUG nova.compute.manager [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1954.371137] env[67015]: DEBUG nova.policy [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '23c9c3ba466e4429b558586e3a1d5a5e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd9422713742645cdb90250d49182e210', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 1954.388150] env[67015]: DEBUG nova.compute.manager [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Start spawning the instance on the hypervisor. 
{{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1954.418215] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e06dde4c-78b1-4062-bbe0-12978f7351b2 tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Acquiring lock "232b89d8-08a1-45af-91e6-1dc979880009" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1954.424277] env[67015]: DEBUG nova.virt.hardware [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1954.424508] env[67015]: DEBUG nova.virt.hardware [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1954.424667] env[67015]: DEBUG nova.virt.hardware [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1954.424850] env[67015]: DEBUG nova.virt.hardware [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1954.424996] env[67015]: DEBUG nova.virt.hardware [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1954.425386] env[67015]: DEBUG nova.virt.hardware [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1954.425386] env[67015]: DEBUG nova.virt.hardware [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1954.425520] env[67015]: DEBUG nova.virt.hardware [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1954.425651] env[67015]: DEBUG nova.virt.hardware [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1954.425812] env[67015]: DEBUG nova.virt.hardware [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1954.425985] env[67015]: DEBUG nova.virt.hardware [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1954.427090] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a96b74-d2d5-4ad4-91ee-b8c6b8b4c637 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.434783] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf7bc21-ac3b-4c8c-8ba2-8906d2b61b8b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.678302] env[67015]: DEBUG nova.network.neutron [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Successfully created port: 83affd97-be55-49d8-8a6b-7b80b8ebf4c7 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1955.234349] env[67015]: DEBUG nova.compute.manager [req-ed5ffc9c-09d8-4c66-aaec-ef74b418d659 req-7bd64c93-97ad-4212-9a45-370a253b3604 service nova] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Received event network-vif-plugged-83affd97-be55-49d8-8a6b-7b80b8ebf4c7 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1955.234349] env[67015]: DEBUG oslo_concurrency.lockutils [req-ed5ffc9c-09d8-4c66-aaec-ef74b418d659 req-7bd64c93-97ad-4212-9a45-370a253b3604 service nova] Acquiring lock "dea0f558-4d4b-41f4-9df9-c997835a628c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1955.234349] env[67015]: DEBUG oslo_concurrency.lockutils [req-ed5ffc9c-09d8-4c66-aaec-ef74b418d659 req-7bd64c93-97ad-4212-9a45-370a253b3604 service nova] Lock "dea0f558-4d4b-41f4-9df9-c997835a628c-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1955.234349] env[67015]: DEBUG oslo_concurrency.lockutils [req-ed5ffc9c-09d8-4c66-aaec-ef74b418d659 req-7bd64c93-97ad-4212-9a45-370a253b3604 service nova] Lock "dea0f558-4d4b-41f4-9df9-c997835a628c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1955.235298] env[67015]: DEBUG nova.compute.manager [req-ed5ffc9c-09d8-4c66-aaec-ef74b418d659 req-7bd64c93-97ad-4212-9a45-370a253b3604 service nova] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] No waiting events found dispatching network-vif-plugged-83affd97-be55-49d8-8a6b-7b80b8ebf4c7 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1955.235298] env[67015]: WARNING nova.compute.manager [req-ed5ffc9c-09d8-4c66-aaec-ef74b418d659 req-7bd64c93-97ad-4212-9a45-370a253b3604 service nova] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Received unexpected event network-vif-plugged-83affd97-be55-49d8-8a6b-7b80b8ebf4c7 for instance with vm_state building and task_state spawning. [ 1955.307725] env[67015]: DEBUG nova.network.neutron [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Successfully updated port: 83affd97-be55-49d8-8a6b-7b80b8ebf4c7 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1955.321582] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Acquiring lock "refresh_cache-dea0f558-4d4b-41f4-9df9-c997835a628c" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1955.321726] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Acquired lock "refresh_cache-dea0f558-4d4b-41f4-9df9-c997835a628c" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1955.321868] env[67015]: DEBUG nova.network.neutron [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1955.366281] env[67015]: DEBUG nova.network.neutron [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Instance cache missing network info. 
{{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1955.536863] env[67015]: DEBUG nova.network.neutron [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Updating instance_info_cache with network_info: [{"id": "83affd97-be55-49d8-8a6b-7b80b8ebf4c7", "address": "fa:16:3e:f4:0d:06", "network": {"id": "97f89617-1b20-44c6-947e-9412a3a73a56", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1552635540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d9422713742645cdb90250d49182e210", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83affd97-be", "ovs_interfaceid": "83affd97-be55-49d8-8a6b-7b80b8ebf4c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1955.554341] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Releasing lock "refresh_cache-dea0f558-4d4b-41f4-9df9-c997835a628c" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1955.554711] env[67015]: DEBUG nova.compute.manager [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Instance network_info: |[{"id": "83affd97-be55-49d8-8a6b-7b80b8ebf4c7", "address": "fa:16:3e:f4:0d:06", "network": {"id": "97f89617-1b20-44c6-947e-9412a3a73a56", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1552635540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d9422713742645cdb90250d49182e210", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83affd97-be", "ovs_interfaceid": "83affd97-be55-49d8-8a6b-7b80b8ebf4c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1955.555176] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:0d:06', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c1b8689-a9b4-4972-beb9-6a1c8de1dc88', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '83affd97-be55-49d8-8a6b-7b80b8ebf4c7', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1955.563030] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Creating folder: Project (d9422713742645cdb90250d49182e210). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1955.563662] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-deab5b7a-4b25-411c-8c3c-07074bde3fde {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.575780] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Created folder: Project (d9422713742645cdb90250d49182e210) in parent group-v623108. [ 1955.575982] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Creating folder: Instances. Parent ref: group-v623224. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1955.576237] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c24eefbe-5a46-4b54-bf02-02366113d9d6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.587312] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Created folder: Instances in parent group-v623224. [ 1955.587730] env[67015]: DEBUG oslo.service.loopingcall [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1955.587899] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1955.587980] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ec02f888-c6e1-48f7-a6f7-0b3822fefa85 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.607615] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1955.607615] env[67015]: value = "task-3114573" [ 1955.607615] env[67015]: _type = "Task" [ 1955.607615] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.616318] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114573, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.116762] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114573, 'name': CreateVM_Task, 'duration_secs': 0.299106} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.116976] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1956.117602] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1956.117768] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1956.118109] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1956.118359] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9aff29a0-e2dd-4d9b-8c19-f0dfe0c1fb00 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.122840] env[67015]: DEBUG oslo_vmware.api [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Waiting for the task: (returnval){ [ 1956.122840] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52a0c35d-019d-6516-287e-94ff8ef181d8" [ 1956.122840] env[67015]: _type = "Task" 
[ 1956.122840] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.132871] env[67015]: DEBUG oslo_vmware.api [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52a0c35d-019d-6516-287e-94ff8ef181d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.633315] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1956.633612] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1956.633780] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1957.258583] env[67015]: DEBUG nova.compute.manager [req-595d8f88-0303-44b1-91da-fbfdb345c7f6 req-fa0b6b44-115e-419d-90df-244abc9d3b7a service nova] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Received event network-changed-83affd97-be55-49d8-8a6b-7b80b8ebf4c7 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1957.258790] env[67015]: DEBUG nova.compute.manager [req-595d8f88-0303-44b1-91da-fbfdb345c7f6 req-fa0b6b44-115e-419d-90df-244abc9d3b7a service nova] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Refreshing instance network info cache due to event network-changed-83affd97-be55-49d8-8a6b-7b80b8ebf4c7. 
{{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1957.258998] env[67015]: DEBUG oslo_concurrency.lockutils [req-595d8f88-0303-44b1-91da-fbfdb345c7f6 req-fa0b6b44-115e-419d-90df-244abc9d3b7a service nova] Acquiring lock "refresh_cache-dea0f558-4d4b-41f4-9df9-c997835a628c" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1957.259160] env[67015]: DEBUG oslo_concurrency.lockutils [req-595d8f88-0303-44b1-91da-fbfdb345c7f6 req-fa0b6b44-115e-419d-90df-244abc9d3b7a service nova] Acquired lock "refresh_cache-dea0f558-4d4b-41f4-9df9-c997835a628c" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1957.259324] env[67015]: DEBUG nova.network.neutron [req-595d8f88-0303-44b1-91da-fbfdb345c7f6 req-fa0b6b44-115e-419d-90df-244abc9d3b7a service nova] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Refreshing network info cache for port 83affd97-be55-49d8-8a6b-7b80b8ebf4c7 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1957.683085] env[67015]: DEBUG nova.network.neutron [req-595d8f88-0303-44b1-91da-fbfdb345c7f6 req-fa0b6b44-115e-419d-90df-244abc9d3b7a service nova] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Updated VIF entry in instance network info cache for port 83affd97-be55-49d8-8a6b-7b80b8ebf4c7. {{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1957.683507] env[67015]: DEBUG nova.network.neutron [req-595d8f88-0303-44b1-91da-fbfdb345c7f6 req-fa0b6b44-115e-419d-90df-244abc9d3b7a service nova] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Updating instance_info_cache with network_info: [{"id": "83affd97-be55-49d8-8a6b-7b80b8ebf4c7", "address": "fa:16:3e:f4:0d:06", "network": {"id": "97f89617-1b20-44c6-947e-9412a3a73a56", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1552635540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d9422713742645cdb90250d49182e210", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83affd97-be", "ovs_interfaceid": "83affd97-be55-49d8-8a6b-7b80b8ebf4c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1957.693876] env[67015]: DEBUG oslo_concurrency.lockutils [req-595d8f88-0303-44b1-91da-fbfdb345c7f6 req-fa0b6b44-115e-419d-90df-244abc9d3b7a service nova] Releasing lock "refresh_cache-dea0f558-4d4b-41f4-9df9-c997835a628c" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1974.515092] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1975.960040] env[67015]: WARNING oslo_vmware.rw_handles [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1975.960040] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1975.960040] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1975.960040] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1975.960040] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1975.960040] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 1975.960040] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1975.960040] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1975.960040] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1975.960040] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1975.960040] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1975.960040] env[67015]: ERROR oslo_vmware.rw_handles [ 1975.960040] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/fc236569-183f-4775-b912-77e48e21fa05/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1975.962616] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1975.962851] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Copying Virtual Disk [datastore2] vmware_temp/fc236569-183f-4775-b912-77e48e21fa05/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/fc236569-183f-4775-b912-77e48e21fa05/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1975.963164] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-919db661-85bb-4137-a9e3-c75c7cc290de {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.971068] env[67015]: DEBUG oslo_vmware.api [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 
tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Waiting for the task: (returnval){ [ 1975.971068] env[67015]: value = "task-3114574" [ 1975.971068] env[67015]: _type = "Task" [ 1975.971068] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.978305] env[67015]: DEBUG oslo_vmware.api [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Task: {'id': task-3114574, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.483712] env[67015]: DEBUG oslo_vmware.exceptions [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Fault InvalidArgument not matched. {{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1976.483963] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1976.484438] env[67015]: ERROR nova.compute.manager [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1976.484438] env[67015]: Faults: ['InvalidArgument'] [ 1976.484438] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Traceback (most recent call last): [ 1976.484438] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1976.484438] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] yield resources [ 1976.484438] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1976.484438] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] self.driver.spawn(context, instance, image_meta, [ 1976.484438] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1976.484438] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1976.484438] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1976.484438] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] self._fetch_image_if_missing(context, vi) [ 1976.484438] env[67015]: ERROR nova.compute.manager [instance: 
41f47735-f679-4b30-8e30-f917dcf4db42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1976.484864] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] image_cache(vi, tmp_image_ds_loc) [ 1976.484864] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1976.484864] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] vm_util.copy_virtual_disk( [ 1976.484864] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1976.484864] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] session._wait_for_task(vmdk_copy_task) [ 1976.484864] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1976.484864] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] return self.wait_for_task(task_ref) [ 1976.484864] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1976.484864] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] return evt.wait() [ 1976.484864] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1976.484864] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] result = hub.switch() [ 1976.484864] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1976.484864] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] return self.greenlet.switch() [ 1976.485328] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1976.485328] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] self.f(*self.args, **self.kw) [ 1976.485328] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1976.485328] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] raise exceptions.translate_fault(task_info.error) [ 1976.485328] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1976.485328] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Faults: ['InvalidArgument'] [ 1976.485328] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] [ 1976.485328] env[67015]: INFO nova.compute.manager [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 
tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Terminating instance [ 1976.486368] env[67015]: DEBUG oslo_concurrency.lockutils [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1976.486589] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1976.486837] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb2bc2e9-7a9a-4372-9712-287991409ae6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.489131] env[67015]: DEBUG nova.compute.manager [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1976.489388] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1976.490085] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6dfc34d-b3ce-4772-a2b0-35722655098f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.496944] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1976.497885] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4a403476-51c1-44f1-a93c-6d886bdd6789 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.499311] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1976.499485] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1976.500138] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-391ddeab-6d52-4d2a-a5cc-f404487ffc1e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.504876] env[67015]: DEBUG oslo_vmware.api [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Waiting for the task: (returnval){ [ 1976.504876] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52c6ace6-4b54-6b9c-e86b-09c596acffd8" [ 1976.504876] env[67015]: _type = "Task" [ 1976.504876] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.512015] env[67015]: DEBUG oslo_vmware.api [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52c6ace6-4b54-6b9c-e86b-09c596acffd8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.560935] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1976.561181] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1976.561366] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Deleting the datastore file [datastore2] 41f47735-f679-4b30-8e30-f917dcf4db42 {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1976.561628] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fcc1d660-348c-4410-be48-370404ccc100 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.567608] env[67015]: DEBUG oslo_vmware.api [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Waiting for the task: (returnval){ [ 1976.567608] env[67015]: value = "task-3114576" [ 1976.567608] env[67015]: _type = "Task" [ 1976.567608] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.575069] env[67015]: DEBUG oslo_vmware.api [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Task: {'id': task-3114576, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.015396] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1977.015750] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Creating directory with path [datastore2] vmware_temp/9bf5ec24-e604-461e-a1b4-ffa0a6e175b7/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1977.015872] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f8e0f55a-94a1-417f-8621-9b3c2b9a5737 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.028233] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Created directory with path [datastore2] vmware_temp/9bf5ec24-e604-461e-a1b4-ffa0a6e175b7/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1977.028405] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Fetch image to [datastore2] vmware_temp/9bf5ec24-e604-461e-a1b4-ffa0a6e175b7/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1977.028582] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/9bf5ec24-e604-461e-a1b4-ffa0a6e175b7/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1977.029294] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f857f2-5976-41b9-a79a-f596eef7eae0 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.035493] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1efd199-53fa-4d12-94af-2f80a3055f98 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.044046] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b17a956e-c7d1-4cc7-8df4-1d79a4808889 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.075695] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e8cf35f0-450e-4e45-93a2-bb8820480fbc {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.083744] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9903b442-40df-433e-aaac-077bd384c86c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.085397] env[67015]: DEBUG oslo_vmware.api [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Task: {'id': task-3114576, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072627} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.085633] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1977.085809] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1977.085978] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1977.086186] env[67015]: INFO nova.compute.manager [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1977.088275] env[67015]: DEBUG nova.compute.claims [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1977.088448] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1977.088657] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1977.108150] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1977.252384] env[67015]: DEBUG oslo_vmware.rw_handles [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9bf5ec24-e604-461e-a1b4-ffa0a6e175b7/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1977.310028] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb74356-f876-4001-be77-4fde777bf512 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.314835] env[67015]: DEBUG oslo_vmware.rw_handles [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1977.315016] env[67015]: DEBUG oslo_vmware.rw_handles [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9bf5ec24-e604-461e-a1b4-ffa0a6e175b7/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1977.318597] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbcfec9b-45d4-4dae-8238-5bae3fb1112f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.349480] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d77e0929-1c72-46a5-be3c-8414e7f5c0c6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.356737] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ddd6cb-4ad3-4935-9489-a03b48f1f68d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.369678] env[67015]: DEBUG nova.compute.provider_tree [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1977.378790] env[67015]: DEBUG nova.scheduler.client.report [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1977.398345] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.308s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1977.398345] env[67015]: ERROR nova.compute.manager [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1977.398345] env[67015]: Faults: ['InvalidArgument'] [ 1977.398345] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Traceback (most recent call last): [ 1977.398345] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1977.398345] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] self.driver.spawn(context, instance, image_meta, [ 1977.398345] env[67015]: ERROR nova.compute.manager [instance: 
41f47735-f679-4b30-8e30-f917dcf4db42] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1977.398345] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1977.398345] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1977.398345] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] self._fetch_image_if_missing(context, vi) [ 1977.398738] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1977.398738] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] image_cache(vi, tmp_image_ds_loc) [ 1977.398738] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1977.398738] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] vm_util.copy_virtual_disk( [ 1977.398738] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1977.398738] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] session._wait_for_task(vmdk_copy_task) [ 1977.398738] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1977.398738] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] return self.wait_for_task(task_ref) [ 1977.398738] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1977.398738] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] return evt.wait() [ 1977.398738] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1977.398738] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] result = hub.switch() [ 1977.398738] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1977.399107] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] return self.greenlet.switch() [ 1977.399107] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1977.399107] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] self.f(*self.args, **self.kw) [ 1977.399107] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1977.399107] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] raise 
exceptions.translate_fault(task_info.error) [ 1977.399107] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1977.399107] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Faults: ['InvalidArgument'] [ 1977.399107] env[67015]: ERROR nova.compute.manager [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] [ 1977.399107] env[67015]: DEBUG nova.compute.utils [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1977.399943] env[67015]: DEBUG nova.compute.manager [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Build of instance 41f47735-f679-4b30-8e30-f917dcf4db42 was re-scheduled: A specified parameter was not correct: fileType [ 1977.399943] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1977.400272] env[67015]: DEBUG nova.compute.manager [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1977.400441] env[67015]: DEBUG nova.compute.manager [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1977.400608] env[67015]: DEBUG nova.compute.manager [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1977.400766] env[67015]: DEBUG nova.network.neutron [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1977.514064] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1977.514318] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1977.697608] env[67015]: DEBUG nova.network.neutron [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1977.711306] env[67015]: INFO nova.compute.manager [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Took 0.31 seconds to deallocate network for instance. 
[ 1977.801030] env[67015]: INFO nova.scheduler.client.report [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Deleted allocations for instance 41f47735-f679-4b30-8e30-f917dcf4db42 [ 1977.821738] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5fb5e41e-4aba-4d6a-90ab-79eb1ab7ebc7 tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Lock "41f47735-f679-4b30-8e30-f917dcf4db42" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 670.916s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1977.822087] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b2f411ac-3827-45fa-8f3f-89d98f885dda tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Lock "41f47735-f679-4b30-8e30-f917dcf4db42" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 474.921s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1977.822416] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b2f411ac-3827-45fa-8f3f-89d98f885dda tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Acquiring lock "41f47735-f679-4b30-8e30-f917dcf4db42-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1977.822668] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b2f411ac-3827-45fa-8f3f-89d98f885dda tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Lock "41f47735-f679-4b30-8e30-f917dcf4db42-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1977.822864] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b2f411ac-3827-45fa-8f3f-89d98f885dda tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Lock "41f47735-f679-4b30-8e30-f917dcf4db42-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1977.825160] env[67015]: INFO nova.compute.manager [None req-b2f411ac-3827-45fa-8f3f-89d98f885dda tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Terminating instance [ 1977.827276] env[67015]: DEBUG nova.compute.manager [None req-b2f411ac-3827-45fa-8f3f-89d98f885dda tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Start destroying the instance on the hypervisor.
{{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1977.827374] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-b2f411ac-3827-45fa-8f3f-89d98f885dda tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1977.828046] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-84fc9f48-a63b-4c68-b4d2-7e9fdf8c2a7d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.838488] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ab7aa4f-584f-4cdd-aad2-fdeed33678c2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.868347] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-b2f411ac-3827-45fa-8f3f-89d98f885dda tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 41f47735-f679-4b30-8e30-f917dcf4db42 could not be found. [ 1977.868561] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-b2f411ac-3827-45fa-8f3f-89d98f885dda tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1977.868738] env[67015]: INFO nova.compute.manager [None req-b2f411ac-3827-45fa-8f3f-89d98f885dda tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1977.868985] env[67015]: DEBUG oslo.service.loopingcall [None req-b2f411ac-3827-45fa-8f3f-89d98f885dda tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1977.869220] env[67015]: DEBUG nova.compute.manager [-] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1977.869316] env[67015]: DEBUG nova.network.neutron [-] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1977.890896] env[67015]: DEBUG nova.network.neutron [-] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1977.898807] env[67015]: INFO nova.compute.manager [-] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] Took 0.03 seconds to deallocate network for instance.
[ 1977.985249] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b2f411ac-3827-45fa-8f3f-89d98f885dda tempest-ServerMetadataNegativeTestJSON-716627482 tempest-ServerMetadataNegativeTestJSON-716627482-project-member] Lock "41f47735-f679-4b30-8e30-f917dcf4db42" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.163s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1977.986593] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "41f47735-f679-4b30-8e30-f917dcf4db42" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 203.173s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1977.986593] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 41f47735-f679-4b30-8e30-f917dcf4db42] During sync_power_state the instance has a pending task (deleting). Skip. [ 1977.986593] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "41f47735-f679-4b30-8e30-f917dcf4db42" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1978.866373] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ef5bebc4-767e-4387-938c-8582f4b544ef tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Acquiring lock "8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1980.509773] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1980.513450] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1980.513703] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1980.525416] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1980.525680] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1980.525870] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1980.526042] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1980.527147] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4440a89-29fb-4e56-ad0e-6333c0ad1569 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.535775] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-064936ed-7d98-4aab-8630-98945341252a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.549153] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d08a303-0a24-493f-b537-7bb47c83f4d5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.555072] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc2e53d-27d2-4581-b482-f68ee7b0c6ac {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.583778] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180933MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1980.583938] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1980.584125] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1980.653163] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 199b0508-5b88-41b4-ae08-dcdabb656686 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1980.653347] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance f9cb0b88-053a-4a6d-be59-5aa202b6a4f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1980.653478] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance f7de465c-7557-41d0-b71a-ad0872c93745 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1980.653623] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 4d61f178-b532-4ddb-958f-68723d041497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1980.653762] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6daf9c76-9471-43ec-9dd6-aaa43efc391b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1980.653875] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance c1049b71-7c9b-4772-a889-fee93a62cf05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1980.654011] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 232b89d8-08a1-45af-91e6-1dc979880009 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1980.654161] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1980.654279] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance dea0f558-4d4b-41f4-9df9-c997835a628c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1980.654469] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1980.654605] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1980.766334] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97f44af7-caca-4992-b55d-e4c7ed0aa5ba {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.773747] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0352d298-3444-44fb-a160-245392668c81 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.805422] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d67d37d6-fb35-499a-85c6-f01bbeae4c64 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.812482] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13f06adb-637d-4066-b0b0-a6beb3e248ee {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.825330] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1980.833651] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1980.847396] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1980.847681] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.263s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1983.844294] env[67015]: DEBUG oslo_service.periodic_task [None 
req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1984.513956] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1985.514340] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1985.514712] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1988.575055] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "94751383-c885-4039-88b3-c1f6d3460e23" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1988.575357] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "94751383-c885-4039-88b3-c1f6d3460e23" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1988.587815] env[67015]: DEBUG nova.compute.manager [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Starting instance... 
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1989.345840] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1989.346127] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1989.347672] env[67015]: INFO nova.compute.claims [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1989.514187] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1989.514371] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1989.514493] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1989.517772] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b8cf802-3774-478d-962b-ebf9adb6ebb9 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.525316] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59448734-4cca-4184-b6c0-63cec4196c17 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.555884] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7078a091-f1a8-48f0-a6d1-5620d09df929 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.559893] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1989.560057] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Skipping network cache update for instance because it is Building. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1989.560196] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1989.560321] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1989.560444] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1989.560566] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1989.560685] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1989.560802] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1989.560917] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1989.561059] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1989.561258] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1989.566423] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43a5a2d-a3f7-410f-8d47-7681488cfe22 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.579478] env[67015]: DEBUG nova.compute.provider_tree [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1989.587723] env[67015]: DEBUG nova.scheduler.client.report [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1989.603250] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.257s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1989.603736] env[67015]: DEBUG nova.compute.manager [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1989.634731] env[67015]: DEBUG nova.compute.utils [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1989.635957] env[67015]: DEBUG nova.compute.manager [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1989.636138] env[67015]: DEBUG nova.network.neutron [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1989.647224] env[67015]: DEBUG nova.compute.manager [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Start building block device mappings for instance. 
{{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1989.693648] env[67015]: DEBUG nova.policy [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c296928b2fac410abe4cf22099518f74', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '65b8edd6f91443aa8051a18bbd34cc29', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 1989.706792] env[67015]: DEBUG nova.compute.manager [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Start spawning the instance on the hypervisor. {{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1989.731118] env[67015]: DEBUG nova.virt.hardware [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1989.731361] env[67015]: DEBUG nova.virt.hardware [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1989.731620] env[67015]: DEBUG nova.virt.hardware [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1989.731787] env[67015]: DEBUG nova.virt.hardware [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1989.731935] env[67015]: DEBUG nova.virt.hardware [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1989.732095] env[67015]: DEBUG nova.virt.hardware [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 
tempest-ServersTestJSON-767920299-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1989.732305] env[67015]: DEBUG nova.virt.hardware [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1989.732551] env[67015]: DEBUG nova.virt.hardware [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1989.732723] env[67015]: DEBUG nova.virt.hardware [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1989.732885] env[67015]: DEBUG nova.virt.hardware [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1989.733067] env[67015]: DEBUG nova.virt.hardware [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1989.733986] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97a957ad-81e1-45e6-9f41-a79e6c2934c1 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.741588] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58bee9ab-01a3-438f-83dd-93035d063a3c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.011272] env[67015]: DEBUG nova.network.neutron [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Successfully created port: 33c64f2d-73e5-45b4-ad35-06b7b6bd8a26 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1990.592251] env[67015]: DEBUG nova.network.neutron [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Successfully updated port: 33c64f2d-73e5-45b4-ad35-06b7b6bd8a26 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1990.605447] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "refresh_cache-94751383-c885-4039-88b3-c1f6d3460e23" {{(pid=67015) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1990.605826] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquired lock "refresh_cache-94751383-c885-4039-88b3-c1f6d3460e23" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1990.605826] env[67015]: DEBUG nova.network.neutron [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1990.632305] env[67015]: DEBUG nova.compute.manager [req-84e2a227-654f-42c7-a2f3-77e2c2c296bf req-70887b84-142e-4632-953d-fdd7287cf534 service nova] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Received event network-vif-plugged-33c64f2d-73e5-45b4-ad35-06b7b6bd8a26 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1990.632305] env[67015]: DEBUG oslo_concurrency.lockutils [req-84e2a227-654f-42c7-a2f3-77e2c2c296bf req-70887b84-142e-4632-953d-fdd7287cf534 service nova] Acquiring lock "94751383-c885-4039-88b3-c1f6d3460e23-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1990.632305] env[67015]: DEBUG oslo_concurrency.lockutils [req-84e2a227-654f-42c7-a2f3-77e2c2c296bf req-70887b84-142e-4632-953d-fdd7287cf534 service nova] Lock "94751383-c885-4039-88b3-c1f6d3460e23-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1990.632305] env[67015]: DEBUG oslo_concurrency.lockutils [req-84e2a227-654f-42c7-a2f3-77e2c2c296bf req-70887b84-142e-4632-953d-fdd7287cf534 service nova] Lock "94751383-c885-4039-88b3-c1f6d3460e23-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1990.632611] env[67015]: DEBUG nova.compute.manager [req-84e2a227-654f-42c7-a2f3-77e2c2c296bf req-70887b84-142e-4632-953d-fdd7287cf534 service nova] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] No waiting events found dispatching network-vif-plugged-33c64f2d-73e5-45b4-ad35-06b7b6bd8a26 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1990.632611] env[67015]: WARNING nova.compute.manager [req-84e2a227-654f-42c7-a2f3-77e2c2c296bf req-70887b84-142e-4632-953d-fdd7287cf534 service nova] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Received unexpected event network-vif-plugged-33c64f2d-73e5-45b4-ad35-06b7b6bd8a26 for instance with vm_state building and task_state spawning. [ 1990.651157] env[67015]: DEBUG nova.network.neutron [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Instance cache missing network info. 
{{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1990.808401] env[67015]: DEBUG nova.network.neutron [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Updating instance_info_cache with network_info: [{"id": "33c64f2d-73e5-45b4-ad35-06b7b6bd8a26", "address": "fa:16:3e:f2:ba:42", "network": {"id": "9688d542-250f-4036-8015-7464f44aa4d6", "bridge": "br-int", "label": "tempest-ServersTestJSON-439582917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65b8edd6f91443aa8051a18bbd34cc29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33c64f2d-73", "ovs_interfaceid": "33c64f2d-73e5-45b4-ad35-06b7b6bd8a26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1990.819315] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Releasing lock "refresh_cache-94751383-c885-4039-88b3-c1f6d3460e23" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1990.819634] env[67015]: DEBUG nova.compute.manager [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Instance network_info: |[{"id": "33c64f2d-73e5-45b4-ad35-06b7b6bd8a26", "address": "fa:16:3e:f2:ba:42", "network": {"id": "9688d542-250f-4036-8015-7464f44aa4d6", "bridge": "br-int", "label": "tempest-ServersTestJSON-439582917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65b8edd6f91443aa8051a18bbd34cc29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33c64f2d-73", "ovs_interfaceid": "33c64f2d-73e5-45b4-ad35-06b7b6bd8a26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1990.820076] env[67015]: DEBUG 
nova.virt.vmwareapi.vmops [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:ba:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb0e556a-0f69-4a5c-af62-ffc46edb8e63', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '33c64f2d-73e5-45b4-ad35-06b7b6bd8a26', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1990.827686] env[67015]: DEBUG oslo.service.loopingcall [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1990.828156] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1990.828399] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-df434b9f-0648-4299-baf6-86f5d6441cd5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.848852] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1990.848852] env[67015]: value = "task-3114577" [ 1990.848852] env[67015]: _type = "Task" [ 1990.848852] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.857207] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114577, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.359313] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114577, 'name': CreateVM_Task, 'duration_secs': 0.271053} completed successfully. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.359491] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1991.360124] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1991.360295] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1991.360636] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1991.360884] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a054fa09-230d-41f2-841c-42dbf84b3b42 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.365258] env[67015]: DEBUG oslo_vmware.api [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Waiting for the task: (returnval){ [ 1991.365258] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52422d0f-bda2-496d-3fd0-7fe8ae5f39c2" [ 1991.365258] env[67015]: _type = "Task" [ 1991.365258] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1991.372913] env[67015]: DEBUG oslo_vmware.api [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52422d0f-bda2-496d-3fd0-7fe8ae5f39c2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.875905] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1991.876549] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1991.876549] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1992.763442] env[67015]: DEBUG nova.compute.manager [req-1b614551-fcc4-4000-9b69-e8506af3eb34 req-549bf5b1-51d5-4940-88fb-a70a309ad1e3 service nova] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Received event network-changed-33c64f2d-73e5-45b4-ad35-06b7b6bd8a26 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1992.763651] env[67015]: DEBUG nova.compute.manager [req-1b614551-fcc4-4000-9b69-e8506af3eb34 req-549bf5b1-51d5-4940-88fb-a70a309ad1e3 service nova] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Refreshing instance network info cache due to event network-changed-33c64f2d-73e5-45b4-ad35-06b7b6bd8a26. {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1992.763828] env[67015]: DEBUG oslo_concurrency.lockutils [req-1b614551-fcc4-4000-9b69-e8506af3eb34 req-549bf5b1-51d5-4940-88fb-a70a309ad1e3 service nova] Acquiring lock "refresh_cache-94751383-c885-4039-88b3-c1f6d3460e23" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1992.763979] env[67015]: DEBUG oslo_concurrency.lockutils [req-1b614551-fcc4-4000-9b69-e8506af3eb34 req-549bf5b1-51d5-4940-88fb-a70a309ad1e3 service nova] Acquired lock "refresh_cache-94751383-c885-4039-88b3-c1f6d3460e23" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1992.764154] env[67015]: DEBUG nova.network.neutron [req-1b614551-fcc4-4000-9b69-e8506af3eb34 req-549bf5b1-51d5-4940-88fb-a70a309ad1e3 service nova] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Refreshing network info cache for port 33c64f2d-73e5-45b4-ad35-06b7b6bd8a26 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1993.015079] env[67015]: DEBUG nova.network.neutron [req-1b614551-fcc4-4000-9b69-e8506af3eb34 req-549bf5b1-51d5-4940-88fb-a70a309ad1e3 service nova] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Updated VIF entry in instance network info cache for port 33c64f2d-73e5-45b4-ad35-06b7b6bd8a26. 
{{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1993.015453] env[67015]: DEBUG nova.network.neutron [req-1b614551-fcc4-4000-9b69-e8506af3eb34 req-549bf5b1-51d5-4940-88fb-a70a309ad1e3 service nova] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Updating instance_info_cache with network_info: [{"id": "33c64f2d-73e5-45b4-ad35-06b7b6bd8a26", "address": "fa:16:3e:f2:ba:42", "network": {"id": "9688d542-250f-4036-8015-7464f44aa4d6", "bridge": "br-int", "label": "tempest-ServersTestJSON-439582917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65b8edd6f91443aa8051a18bbd34cc29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33c64f2d-73", "ovs_interfaceid": "33c64f2d-73e5-45b4-ad35-06b7b6bd8a26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1993.024757] env[67015]: DEBUG oslo_concurrency.lockutils [req-1b614551-fcc4-4000-9b69-e8506af3eb34 req-549bf5b1-51d5-4940-88fb-a70a309ad1e3 service nova] Releasing lock "refresh_cache-94751383-c885-4039-88b3-c1f6d3460e23" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2018.311368] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Acquiring lock "6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2018.311671] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Lock "6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2026.298897] env[67015]: WARNING oslo_vmware.rw_handles [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2026.298897] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2026.298897] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2026.298897] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2026.298897] 
env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2026.298897] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 2026.298897] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2026.298897] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2026.298897] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2026.298897] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2026.298897] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2026.298897] env[67015]: ERROR oslo_vmware.rw_handles [ 2026.299675] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/9bf5ec24-e604-461e-a1b4-ffa0a6e175b7/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2026.301741] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2026.301983] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Copying Virtual Disk [datastore2] vmware_temp/9bf5ec24-e604-461e-a1b4-ffa0a6e175b7/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/9bf5ec24-e604-461e-a1b4-ffa0a6e175b7/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2026.302348] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-112b0e9c-7c0d-4526-aac6-5456f7059b4e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.311205] env[67015]: DEBUG oslo_vmware.api [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Waiting for the task: (returnval){ [ 2026.311205] env[67015]: value = "task-3114578" [ 2026.311205] env[67015]: _type = "Task" [ 2026.311205] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2026.318677] env[67015]: DEBUG oslo_vmware.api [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Task: {'id': task-3114578, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.820939] env[67015]: DEBUG oslo_vmware.exceptions [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Fault InvalidArgument not matched. {{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2026.821281] env[67015]: DEBUG oslo_concurrency.lockutils [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2026.821838] env[67015]: ERROR nova.compute.manager [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2026.821838] env[67015]: Faults: ['InvalidArgument'] [ 2026.821838] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Traceback (most recent call last): [ 2026.821838] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2026.821838] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] yield resources [ 2026.821838] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2026.821838] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] self.driver.spawn(context, instance, image_meta, [ 2026.821838] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2026.821838] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2026.821838] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2026.821838] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] self._fetch_image_if_missing(context, vi) [ 2026.821838] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2026.822295] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] image_cache(vi, tmp_image_ds_loc) [ 2026.822295] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2026.822295] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] vm_util.copy_virtual_disk( [ 2026.822295] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2026.822295] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] session._wait_for_task(vmdk_copy_task) [ 2026.822295] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2026.822295] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] return self.wait_for_task(task_ref) [ 2026.822295] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2026.822295] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] return evt.wait() [ 2026.822295] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2026.822295] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] result = hub.switch() [ 2026.822295] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2026.822295] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] return self.greenlet.switch() [ 2026.822744] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2026.822744] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] self.f(*self.args, **self.kw) [ 2026.822744] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2026.822744] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] raise exceptions.translate_fault(task_info.error) [ 2026.822744] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2026.822744] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Faults: ['InvalidArgument'] [ 2026.822744] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] [ 2026.822744] env[67015]: INFO nova.compute.manager [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Terminating instance [ 2026.823765] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2026.823977] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 
tempest-ServerPasswordTestJSON-165492082-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2026.824245] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-07246afb-2f4b-47f6-b95c-2baea789d3b7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2026.826367] env[67015]: DEBUG nova.compute.manager [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 2026.826560] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2026.827356] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47c16911-64f8-4336-9772-ebff2bcecf1c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2026.833891] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 2026.834145] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b153d7ae-a126-4bb3-b09e-3b16e55d17ca {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2026.836468] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2026.836639] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 2026.837285] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53687d9c-40ae-4f00-8bb4-7295c6038f1c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2026.841457] env[67015]: DEBUG oslo_vmware.api [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Waiting for the task: (returnval){
[ 2026.841457] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52104f31-cda7-e3d9-75fe-fc4f89507208"
[ 2026.841457] env[67015]: _type = "Task"
[ 2026.841457] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2026.848418] env[67015]: DEBUG oslo_vmware.api [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52104f31-cda7-e3d9-75fe-fc4f89507208, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2027.352507] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 2027.352856] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Creating directory with path [datastore2] vmware_temp/296c672c-816d-48e3-b687-41107621fa56/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2027.353055] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c77168e5-661a-426e-8f99-52f0e4eac15a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2027.373606] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Created directory with path [datastore2] vmware_temp/296c672c-816d-48e3-b687-41107621fa56/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2027.373799] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Fetch image to [datastore2] vmware_temp/296c672c-816d-48e3-b687-41107621fa56/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 2027.373955] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/296c672c-816d-48e3-b687-41107621fa56/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 2027.374682] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667bdb06-b174-4521-9d81-dd1575e8a6aa {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2027.381052] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c20caf13-9588-42d7-83fa-0487ac32f35d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2027.390685] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a32853b-bafe-4d59-bda5-eee9c757fdfb {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2027.419583] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-412d9d99-9332-43ae-bc4f-021948130a4d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2027.424546] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c7cb975a-f432-40e5-a7c3-753c0fa25841 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2027.445754] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 2027.464456] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 2027.464675] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 2027.464860] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Deleting the datastore file [datastore2] 199b0508-5b88-41b4-ae08-dcdabb656686 {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2027.465132] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-13f428e4-37a1-4a75-ab46-aeb27b3c75a8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2027.470861] env[67015]: DEBUG oslo_vmware.api [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Waiting for the task: (returnval){
[ 2027.470861] env[67015]: value = "task-3114580"
[ 2027.470861] env[67015]: _type = "Task"
[ 2027.470861] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2027.478180] env[67015]: DEBUG oslo_vmware.api [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Task: {'id': task-3114580, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2027.584586] env[67015]: DEBUG oslo_vmware.rw_handles [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/296c672c-816d-48e3-b687-41107621fa56/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 2027.647267] env[67015]: DEBUG oslo_vmware.rw_handles [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 2027.647483] env[67015]: DEBUG oslo_vmware.rw_handles [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/296c672c-816d-48e3-b687-41107621fa56/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 2027.980835] env[67015]: DEBUG oslo_vmware.api [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Task: {'id': task-3114580, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078124} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2027.981044] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2027.981283] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 2027.981464] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2027.981636] env[67015]: INFO nova.compute.manager [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Took 1.16 seconds to destroy the instance on the hypervisor.
[ 2027.983771] env[67015]: DEBUG nova.compute.claims [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 2027.983942] env[67015]: DEBUG oslo_concurrency.lockutils [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2027.984170] env[67015]: DEBUG oslo_concurrency.lockutils [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2028.153604] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc25090-2dff-44d3-9be7-ce981d236ec2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2028.161458] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a47c4c7-c458-4df0-ab44-11a7cb3f7364 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2028.191426] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e5613e-f38f-4b9a-9f3e-929765ea833b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2028.198540] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27eed44d-3904-4ad8-934a-6648f02b44d0 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2028.211419] env[67015]: DEBUG nova.compute.provider_tree [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2028.219848] env[67015]: DEBUG nova.scheduler.client.report [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2028.235566] env[67015]: DEBUG oslo_concurrency.lockutils [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.251s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2028.236103] env[67015]: ERROR nova.compute.manager [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2028.236103] env[67015]: Faults: ['InvalidArgument']
[ 2028.236103] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Traceback (most recent call last):
[ 2028.236103] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2028.236103] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686]     self.driver.spawn(context, instance, image_meta,
[ 2028.236103] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2028.236103] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2028.236103] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2028.236103] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686]     self._fetch_image_if_missing(context, vi)
[ 2028.236103] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2028.236103] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686]     image_cache(vi, tmp_image_ds_loc)
[ 2028.236103] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2028.236466] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686]     vm_util.copy_virtual_disk(
[ 2028.236466] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2028.236466] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686]     session._wait_for_task(vmdk_copy_task)
[ 2028.236466] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2028.236466] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686]     return self.wait_for_task(task_ref)
[ 2028.236466] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2028.236466] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686]     return evt.wait()
[ 2028.236466] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2028.236466] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686]     result = hub.switch()
[ 2028.236466] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2028.236466] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686]     return self.greenlet.switch()
[ 2028.236466] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2028.236466] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686]     self.f(*self.args, **self.kw)
[ 2028.236829] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2028.236829] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686]     raise exceptions.translate_fault(task_info.error)
[ 2028.236829] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2028.236829] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Faults: ['InvalidArgument']
[ 2028.236829] env[67015]: ERROR nova.compute.manager [instance: 199b0508-5b88-41b4-ae08-dcdabb656686]
[ 2028.236829] env[67015]: DEBUG nova.compute.utils [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 2028.238179] env[67015]: DEBUG nova.compute.manager [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Build of instance 199b0508-5b88-41b4-ae08-dcdabb656686 was re-scheduled: A specified parameter was not correct: fileType
[ 2028.238179] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 2028.238543] env[67015]: DEBUG nova.compute.manager [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 2028.238712] env[67015]: DEBUG nova.compute.manager [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 2028.238882] env[67015]: DEBUG nova.compute.manager [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 2028.239283] env[67015]: DEBUG nova.network.neutron [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 2028.536971] env[67015]: DEBUG nova.network.neutron [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2028.549710] env[67015]: INFO nova.compute.manager [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Took 0.31 seconds to deallocate network for instance.
[ 2028.635761] env[67015]: INFO nova.scheduler.client.report [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Deleted allocations for instance 199b0508-5b88-41b4-ae08-dcdabb656686
[ 2028.656909] env[67015]: DEBUG oslo_concurrency.lockutils [None req-31353219-7ed2-41e7-8ec6-0a7e1cf1a4c1 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Lock "199b0508-5b88-41b4-ae08-dcdabb656686" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 644.539s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2028.658108] env[67015]: DEBUG oslo_concurrency.lockutils [None req-35bc156e-5989-407c-94b7-dd13280ff8f3 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Lock "199b0508-5b88-41b4-ae08-dcdabb656686" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 448.357s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2028.658333] env[67015]: DEBUG oslo_concurrency.lockutils [None req-35bc156e-5989-407c-94b7-dd13280ff8f3 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Acquiring lock "199b0508-5b88-41b4-ae08-dcdabb656686-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2028.658538] env[67015]: DEBUG oslo_concurrency.lockutils [None req-35bc156e-5989-407c-94b7-dd13280ff8f3 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Lock "199b0508-5b88-41b4-ae08-dcdabb656686-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2028.658704] env[67015]: DEBUG oslo_concurrency.lockutils [None req-35bc156e-5989-407c-94b7-dd13280ff8f3 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Lock "199b0508-5b88-41b4-ae08-dcdabb656686-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2028.660649] env[67015]: INFO nova.compute.manager [None req-35bc156e-5989-407c-94b7-dd13280ff8f3 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Terminating instance
[ 2028.662335] env[67015]: DEBUG nova.compute.manager [None req-35bc156e-5989-407c-94b7-dd13280ff8f3 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 2028.662547] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-35bc156e-5989-407c-94b7-dd13280ff8f3 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2028.663032] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2586cfb5-4b85-4abd-abad-0c011a5c34bc {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2028.672360] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d03a73b-be22-4fdd-bccc-ae932fdeb447 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2028.683273] env[67015]: DEBUG nova.compute.manager [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 2028.704529] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-35bc156e-5989-407c-94b7-dd13280ff8f3 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 199b0508-5b88-41b4-ae08-dcdabb656686 could not be found.
[ 2028.704743] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-35bc156e-5989-407c-94b7-dd13280ff8f3 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2028.704923] env[67015]: INFO nova.compute.manager [None req-35bc156e-5989-407c-94b7-dd13280ff8f3 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 2028.705184] env[67015]: DEBUG oslo.service.loopingcall [None req-35bc156e-5989-407c-94b7-dd13280ff8f3 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2028.705406] env[67015]: DEBUG nova.compute.manager [-] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 2028.705500] env[67015]: DEBUG nova.network.neutron [-] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 2028.728052] env[67015]: DEBUG nova.network.neutron [-] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2028.730413] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2028.730652] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2028.732060] env[67015]: INFO nova.compute.claims [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 2028.736669] env[67015]: INFO nova.compute.manager [-] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] Took 0.03 seconds to deallocate network for instance.
[ 2028.826074] env[67015]: DEBUG oslo_concurrency.lockutils [None req-35bc156e-5989-407c-94b7-dd13280ff8f3 tempest-ServerDiskConfigTestJSON-1446338823 tempest-ServerDiskConfigTestJSON-1446338823-project-member] Lock "199b0508-5b88-41b4-ae08-dcdabb656686" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.168s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2028.827189] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "199b0508-5b88-41b4-ae08-dcdabb656686" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 254.013s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2028.827426] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 199b0508-5b88-41b4-ae08-dcdabb656686] During sync_power_state the instance has a pending task (deleting). Skip.
[ 2028.827685] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "199b0508-5b88-41b4-ae08-dcdabb656686" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2028.940546] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb16beb-fa9a-4f8d-a794-9940e9d83ded {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2028.948926] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28da582c-1642-4008-8e78-a41d6c1f1677 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2028.980600] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4bcddae-0d5c-4e4f-a2a8-d5f19ccc3450 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2028.987785] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13a01f95-90ed-496c-b687-de663d681d56 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2029.000598] env[67015]: DEBUG nova.compute.provider_tree [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2029.009185] env[67015]: DEBUG nova.scheduler.client.report [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2029.023489] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.292s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2029.041147] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Acquiring lock "3c2b534d-6821-4313-87b8-f7cbb7592c3e" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2029.041426] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Lock "3c2b534d-6821-4313-87b8-f7cbb7592c3e" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2029.046362] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Lock "3c2b534d-6821-4313-87b8-f7cbb7592c3e" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.005s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2029.046830] env[67015]: DEBUG nova.compute.manager [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 2029.084035] env[67015]: DEBUG nova.compute.utils [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 2029.086941] env[67015]: DEBUG nova.compute.manager [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 2029.087145] env[67015]: DEBUG nova.network.neutron [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 2029.097835] env[67015]: DEBUG nova.compute.manager [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 2029.145499] env[67015]: DEBUG nova.policy [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38cb99abf54f4519ac89a17283fdba40', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8c77a3bee9ba4a838663e8e3f07e11eb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}}
[ 2029.161168] env[67015]: DEBUG nova.compute.manager [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Start spawning the instance on the hypervisor. {{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
[ 2029.186879] env[67015]: DEBUG nova.virt.hardware [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=<?>,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-20T08:16:53Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 2029.187139] env[67015]: DEBUG nova.virt.hardware [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 2029.187301] env[67015]: DEBUG nova.virt.hardware [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 2029.187480] env[67015]: DEBUG nova.virt.hardware [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 2029.187624] env[67015]: DEBUG nova.virt.hardware [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 2029.187771] env[67015]: DEBUG nova.virt.hardware [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 2029.187979] env[67015]: DEBUG nova.virt.hardware [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 2029.188495] env[67015]: DEBUG nova.virt.hardware [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 2029.188495] env[67015]: DEBUG nova.virt.hardware [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 2029.188495] env[67015]: DEBUG nova.virt.hardware [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 2029.189222] env[67015]: DEBUG nova.virt.hardware [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 2029.189476] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8de0f35-fdce-4fec-b992-190376281afe {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2029.197161] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76cafc33-317e-4907-a9ae-432f2333820f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2029.435835] env[67015]: DEBUG nova.network.neutron [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Successfully created port: 8e9f726d-a051-4045-81bd-1f9592cbf5d6 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 2030.009725] env[67015]: DEBUG nova.network.neutron [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Successfully updated port: 8e9f726d-a051-4045-81bd-1f9592cbf5d6 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 2030.020738] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Acquiring lock "refresh_cache-6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2030.020931] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Acquired lock "refresh_cache-6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2030.021053] env[67015]: DEBUG nova.network.neutron [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 2030.065079] env[67015]: DEBUG nova.network.neutron [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 2030.233976] env[67015]: DEBUG nova.network.neutron [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Updating instance_info_cache with network_info: [{"id": "8e9f726d-a051-4045-81bd-1f9592cbf5d6", "address": "fa:16:3e:c9:73:1b", "network": {"id": "7f91b5b3-4a7a-45d3-800e-749a7809428a", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1171529689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c77a3bee9ba4a838663e8e3f07e11eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e9f726d-a0", "ovs_interfaceid": "8e9f726d-a051-4045-81bd-1f9592cbf5d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2030.246673] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Releasing lock "refresh_cache-6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2030.246988] env[67015]: DEBUG nova.compute.manager [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Instance network_info: |[{"id": "8e9f726d-a051-4045-81bd-1f9592cbf5d6", "address": "fa:16:3e:c9:73:1b", "network": {"id": "7f91b5b3-4a7a-45d3-800e-749a7809428a", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1171529689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c77a3bee9ba4a838663e8e3f07e11eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e9f726d-a0", "ovs_interfaceid": "8e9f726d-a051-4045-81bd-1f9592cbf5d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 2030.247413] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:73:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ce62383-8e84-4e26-955b-74c11392f4c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8e9f726d-a051-4045-81bd-1f9592cbf5d6', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 2030.255230] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Creating folder: Project (8c77a3bee9ba4a838663e8e3f07e11eb). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 2030.255784] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a3fbe3c4-1c20-409d-8484-6000d8bb1a95 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2030.266740] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Created folder: Project (8c77a3bee9ba4a838663e8e3f07e11eb) in parent group-v623108.
[ 2030.266935] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Creating folder: Instances. Parent ref: group-v623228. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 2030.267176] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-919c103a-dc4b-48ac-96a0-08dd624b08ca {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2030.276079] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Created folder: Instances in parent group-v623228.
[ 2030.276386] env[67015]: DEBUG oslo.service.loopingcall [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2030.276575] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 2030.276799] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-549d5c55-9a55-4093-9801-98f818e0790c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2030.296309] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 2030.296309] env[67015]: value = "task-3114583"
[ 2030.296309] env[67015]: _type = "Task"
[ 2030.296309] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2030.303425] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114583, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2030.574293] env[67015]: DEBUG nova.compute.manager [req-2e87aa51-6d2e-4504-a16a-54ae6e9f0901 req-4017867b-fac4-42a1-9ced-08bffdbac413 service nova] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Received event network-vif-plugged-8e9f726d-a051-4045-81bd-1f9592cbf5d6 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 2030.574508] env[67015]: DEBUG oslo_concurrency.lockutils [req-2e87aa51-6d2e-4504-a16a-54ae6e9f0901 req-4017867b-fac4-42a1-9ced-08bffdbac413 service nova] Acquiring lock "6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2030.574760] env[67015]: DEBUG oslo_concurrency.lockutils [req-2e87aa51-6d2e-4504-a16a-54ae6e9f0901 req-4017867b-fac4-42a1-9ced-08bffdbac413 service nova] Lock "6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2030.574976] env[67015]: DEBUG oslo_concurrency.lockutils [req-2e87aa51-6d2e-4504-a16a-54ae6e9f0901 req-4017867b-fac4-42a1-9ced-08bffdbac413 service nova] Lock "6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2030.575192] env[67015]: DEBUG nova.compute.manager [req-2e87aa51-6d2e-4504-a16a-54ae6e9f0901 req-4017867b-fac4-42a1-9ced-08bffdbac413 service nova] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] No waiting events found dispatching network-vif-plugged-8e9f726d-a051-4045-81bd-1f9592cbf5d6 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 2030.575393] env[67015]: WARNING nova.compute.manager [req-2e87aa51-6d2e-4504-a16a-54ae6e9f0901 req-4017867b-fac4-42a1-9ced-08bffdbac413 service nova] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Received unexpected event network-vif-plugged-8e9f726d-a051-4045-81bd-1f9592cbf5d6 for instance with vm_state building and task_state spawning.
[ 2030.575597] env[67015]: DEBUG nova.compute.manager [req-2e87aa51-6d2e-4504-a16a-54ae6e9f0901 req-4017867b-fac4-42a1-9ced-08bffdbac413 service nova] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Received event network-changed-8e9f726d-a051-4045-81bd-1f9592cbf5d6 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 2030.575789] env[67015]: DEBUG nova.compute.manager [req-2e87aa51-6d2e-4504-a16a-54ae6e9f0901 req-4017867b-fac4-42a1-9ced-08bffdbac413 service nova] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Refreshing instance network info cache due to event network-changed-8e9f726d-a051-4045-81bd-1f9592cbf5d6. {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 2030.576021] env[67015]: DEBUG oslo_concurrency.lockutils [req-2e87aa51-6d2e-4504-a16a-54ae6e9f0901 req-4017867b-fac4-42a1-9ced-08bffdbac413 service nova] Acquiring lock "refresh_cache-6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2030.576197] env[67015]: DEBUG oslo_concurrency.lockutils [req-2e87aa51-6d2e-4504-a16a-54ae6e9f0901 req-4017867b-fac4-42a1-9ced-08bffdbac413 service nova] Acquired lock "refresh_cache-6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2030.576391] env[67015]: DEBUG nova.network.neutron [req-2e87aa51-6d2e-4504-a16a-54ae6e9f0901 req-4017867b-fac4-42a1-9ced-08bffdbac413 service nova] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Refreshing network info cache for port 8e9f726d-a051-4045-81bd-1f9592cbf5d6 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 2030.806097] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114583, 'name': CreateVM_Task, 'duration_secs': 0.286632} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2030.806227] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 2030.806851] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2030.807029] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2030.807350] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2030.807620] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cfbf10c-d7cf-4078-bfb9-8696e8acff8a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2030.811995] env[67015]: DEBUG oslo_vmware.api [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Waiting for the task: (returnval){
[ 2030.811995] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]522b2fe0-cec2-35b4-230d-d663d6c9bbec"
[ 2030.811995] env[67015]: _type = "Task"
[ 2030.811995] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2030.819168] env[67015]: DEBUG oslo_vmware.api [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]522b2fe0-cec2-35b4-230d-d663d6c9bbec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2030.819864] env[67015]: DEBUG nova.network.neutron [req-2e87aa51-6d2e-4504-a16a-54ae6e9f0901 req-4017867b-fac4-42a1-9ced-08bffdbac413 service nova] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Updated VIF entry in instance network info cache for port 8e9f726d-a051-4045-81bd-1f9592cbf5d6. {{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 2030.820226] env[67015]: DEBUG nova.network.neutron [req-2e87aa51-6d2e-4504-a16a-54ae6e9f0901 req-4017867b-fac4-42a1-9ced-08bffdbac413 service nova] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Updating instance_info_cache with network_info: [{"id": "8e9f726d-a051-4045-81bd-1f9592cbf5d6", "address": "fa:16:3e:c9:73:1b", "network": {"id": "7f91b5b3-4a7a-45d3-800e-749a7809428a", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1171529689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c77a3bee9ba4a838663e8e3f07e11eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e9f726d-a0", "ovs_interfaceid": "8e9f726d-a051-4045-81bd-1f9592cbf5d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2030.829166] env[67015]: DEBUG oslo_concurrency.lockutils [req-2e87aa51-6d2e-4504-a16a-54ae6e9f0901 req-4017867b-fac4-42a1-9ced-08bffdbac413 service nova] Releasing lock "refresh_cache-6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2031.321848] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2031.322214] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 2031.322363] env[67015]: DEBUG oslo_concurrency.lockutils [None req-c5b9d94b-43bd-4a38-b212-574b8df8692d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2036.513842] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2037.514524] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2037.514871] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Cleaning up deleted instances {{(pid=67015) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}}
[ 2037.524405] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] There are 0 instances to clean {{(pid=67015) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}}
[ 2039.524685] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2039.524999] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2040.515055] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2041.509852] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2042.514604] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2042.526187] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2042.526397] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2042.526583] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2042.526734] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[
2042.527798] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee850089-a457-4e50-ac95-7f37e6e54e46 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.536398] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1768158-66c3-49c7-b02b-bfa6217d1310 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.549856] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5717bf66-7b4b-414d-b95a-66b5a077cc56 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.555737] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f57a417-16e1-4a1c-b434-d3d3537b01ff {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.583029] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181078MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2042.583169] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2042.583359] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2042.705818] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance f9cb0b88-053a-4a6d-be59-5aa202b6a4f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2042.705999] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance f7de465c-7557-41d0-b71a-ad0872c93745 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2042.706151] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 4d61f178-b532-4ddb-958f-68723d041497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2042.706275] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6daf9c76-9471-43ec-9dd6-aaa43efc391b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2042.706397] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance c1049b71-7c9b-4772-a889-fee93a62cf05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2042.706522] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 232b89d8-08a1-45af-91e6-1dc979880009 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2042.706641] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2042.706761] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance dea0f558-4d4b-41f4-9df9-c997835a628c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2042.706881] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 94751383-c885-4039-88b3-c1f6d3460e23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2042.706997] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2042.707212] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2042.707349] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2042.722481] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Refreshing inventories for resource provider 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2042.735442] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Updating ProviderTree inventory for provider 82311841-8ff3-4f49-9053-67c5a45ef771 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2042.735624] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Updating inventory in ProviderTree for provider 82311841-8ff3-4f49-9053-67c5a45ef771 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2042.746193] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Refreshing aggregate associations for resource provider 82311841-8ff3-4f49-9053-67c5a45ef771, aggregates: None {{(pid=67015) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2042.763274] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Refreshing trait associations for resource provider 82311841-8ff3-4f49-9053-67c5a45ef771, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=67015) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2042.882742] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbdd7e98-c4b5-44b0-ac77-eff6ed43b67b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.889988] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6a1102ca-6f8a-4550-a864-89beac1783cd {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.918829] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3493a5a-ef1f-4de7-99d7-caa0a702616b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.925899] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da1fd0b0-ba5b-4f0c-936b-7c246127af6a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.938943] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2042.947594] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2042.967204] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2042.967393] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.384s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2044.968550] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2047.514526] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2047.514903] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 2049.514688] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2049.515144] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 2049.515144] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2049.536711] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2049.536899] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2049.536992] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2049.537137] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2049.537262] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2049.537385] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2049.537508] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2049.537628] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Skipping network cache update for instance because it is Building. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2049.537748] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2049.537868] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2049.537987] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 2054.514606] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2054.514879] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Cleaning up deleted instances with incomplete migration {{(pid=67015) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 2054.524212] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2076.148128] env[67015]: WARNING oslo_vmware.rw_handles [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2076.148128] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2076.148128] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2076.148128] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2076.148128] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2076.148128] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 2076.148128] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2076.148128] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2076.148128] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2076.148128] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2076.148128] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2076.148128] env[67015]: ERROR oslo_vmware.rw_handles [ 2076.148844] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 
tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/296c672c-816d-48e3-b687-41107621fa56/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2076.150525] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2076.150779] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Copying Virtual Disk [datastore2] vmware_temp/296c672c-816d-48e3-b687-41107621fa56/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/296c672c-816d-48e3-b687-41107621fa56/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2076.151081] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9c17cf04-d688-42c2-a858-6bb3e7f35e62 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.159696] env[67015]: DEBUG oslo_vmware.api [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Waiting for the task: (returnval){ [ 2076.159696] env[67015]: value = "task-3114584" [ 2076.159696] env[67015]: _type = "Task" [ 2076.159696] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.167903] env[67015]: DEBUG oslo_vmware.api [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Task: {'id': task-3114584, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.673953] env[67015]: DEBUG oslo_vmware.exceptions [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Fault InvalidArgument not matched. 
{{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2076.674346] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2076.675181] env[67015]: ERROR nova.compute.manager [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2076.675181] env[67015]: Faults: ['InvalidArgument'] [ 2076.675181] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Traceback (most recent call last): [ 2076.675181] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2076.675181] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] yield resources [ 2076.675181] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2076.675181] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] self.driver.spawn(context, instance, image_meta, [ 2076.675181] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2076.675181] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2076.675181] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2076.675181] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] self._fetch_image_if_missing(context, vi) [ 2076.675181] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2076.675643] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] image_cache(vi, tmp_image_ds_loc) [ 2076.675643] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2076.675643] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] vm_util.copy_virtual_disk( [ 2076.675643] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2076.675643] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] session._wait_for_task(vmdk_copy_task) [ 2076.675643] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2076.675643] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] return self.wait_for_task(task_ref) [ 2076.675643] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2076.675643] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] return evt.wait() [ 2076.675643] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2076.675643] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] result = hub.switch() [ 2076.675643] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2076.675643] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] return self.greenlet.switch() [ 2076.676119] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2076.676119] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] self.f(*self.args, **self.kw) [ 2076.676119] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2076.676119] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] raise exceptions.translate_fault(task_info.error) [ 2076.676119] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2076.676119] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Faults: ['InvalidArgument'] [ 2076.676119] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] [ 2076.676119] env[67015]: INFO nova.compute.manager [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Terminating instance [ 2076.677838] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2076.678141] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2076.678462] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ec86bd15-a717-4966-90ef-f4a4b8c86560 {{(pid=67015) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.681641] env[67015]: DEBUG nova.compute.manager [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2076.681909] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2076.683013] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d30c38-5128-4928-b529-c058af814be7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.690665] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2076.690896] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fd852d60-a22e-4237-ad47-d1b6287179de {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.693319] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2076.693492] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2076.694598] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92152637-1991-400e-abfe-0f3c6e7969ce {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.699530] env[67015]: DEBUG oslo_vmware.api [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Waiting for the task: (returnval){ [ 2076.699530] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52f9bd07-2cd2-4b2c-90e3-bdd9f35360a4" [ 2076.699530] env[67015]: _type = "Task" [ 2076.699530] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.707183] env[67015]: DEBUG oslo_vmware.api [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52f9bd07-2cd2-4b2c-90e3-bdd9f35360a4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.759220] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2076.759478] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2076.759654] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Deleting the datastore file [datastore2] f9cb0b88-053a-4a6d-be59-5aa202b6a4f7 {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2076.759926] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f889f90a-42aa-4aec-a447-c354add134e6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.770126] env[67015]: DEBUG oslo_vmware.api [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Waiting for the task: (returnval){ [ 2076.770126] env[67015]: value = "task-3114586" [ 2076.770126] env[67015]: _type = "Task" [ 2076.770126] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.778399] env[67015]: DEBUG oslo_vmware.api [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Task: {'id': task-3114586, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.209894] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2077.210281] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Creating directory with path [datastore2] vmware_temp/c427a60a-aeae-476c-b957-f328908e9b8f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2077.210361] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bd954b0a-d7fd-4650-b42b-5a4f09da2f6f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.221292] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Created directory with path [datastore2] vmware_temp/c427a60a-aeae-476c-b957-f328908e9b8f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2077.221469] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Fetch image to [datastore2] vmware_temp/c427a60a-aeae-476c-b957-f328908e9b8f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2077.221631] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/c427a60a-aeae-476c-b957-f328908e9b8f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2077.222347] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afef420a-66af-4e3d-9c07-b7a790f445c0 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.228442] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68fc2e3e-6c54-4e31-9614-a8609b61b746 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.237044] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f94acaef-1727-434a-a64b-70a3235e270a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.267245] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2118b91f-b02e-465e-a5cf-7b2c1dcdb167 {{(pid=67015) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.274861] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-33da6fb5-a4d3-4db2-b3e9-6263639e6341 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.278846] env[67015]: DEBUG oslo_vmware.api [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Task: {'id': task-3114586, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096792} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.279369] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2077.279552] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2077.279719] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2077.279888] env[67015]: INFO nova.compute.manager [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Took 0.60 seconds to destroy the instance on the hypervisor. 
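The destroy sequence traced above (Invoking VirtualMachine.UnregisterVM, then FileManager.DeleteDatastoreFile_Task, then polling task-3114586 until it completes) follows oslo.vmware's invoke-and-wait pattern that recurs throughout this log. Below is a minimal sketch of that pattern, assuming oslo.vmware's public VMwareAPISession interface; the endpoint, credentials, moref id, and datastore path are placeholders for illustration, not values taken from this trace.

    from oslo_vmware import api, vim_util

    # Hypothetical vCenter endpoint and credentials; real values are site-specific.
    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Build a managed object reference from an id; 'datacenter-2' is a placeholder.
    dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')

    # Each "Invoking FileManager.DeleteDatastoreFile_Task with opID=..." entry
    # corresponds to a call like this: invoke_api() issues the SOAP request and
    # returns a Task managed object reference.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore2] example-instance-dir',  # placeholder datastore path
        datacenter=dc_ref)

    # wait_for_task() runs the polling loop that emits the paired
    # "Waiting for the task: (returnval){...}" and "Task: {'id': ...} progress
    # is 0%." entries; it returns the completed task info on success or raises
    # a translated fault (e.g. VimFaultException) when task_info.error is set.
    session.wait_for_task(task)

On failure, the raised fault is what surfaces as the translate_fault traceback recorded elsewhere in this trace.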
[ 2077.282011] env[67015]: DEBUG nova.compute.claims [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2077.282190] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2077.282408] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2077.300840] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2077.401054] env[67015]: DEBUG oslo_vmware.rw_handles [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c427a60a-aeae-476c-b957-f328908e9b8f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2077.463690] env[67015]: DEBUG oslo_vmware.rw_handles [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2077.463885] env[67015]: DEBUG oslo_vmware.rw_handles [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c427a60a-aeae-476c-b957-f328908e9b8f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2077.516739] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2076f834-527e-4014-8e9c-b2ee9b845e4c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.524350] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1279cbdd-6fae-4d1a-982d-803791d94248 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.553154] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a2a0df-be9d-4008-883c-51ac65fc7e52 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.559872] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51095d69-c870-4e68-9d51-fe5a197c6211 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.572260] env[67015]: DEBUG nova.compute.provider_tree [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2077.580592] env[67015]: DEBUG nova.scheduler.client.report [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2077.594601] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.312s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2077.595162] env[67015]: ERROR nova.compute.manager [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2077.595162] env[67015]: Faults: ['InvalidArgument'] [ 2077.595162] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Traceback (most recent call last): [ 2077.595162] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2077.595162] env[67015]: ERROR 
nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] self.driver.spawn(context, instance, image_meta, [ 2077.595162] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2077.595162] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2077.595162] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2077.595162] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] self._fetch_image_if_missing(context, vi) [ 2077.595162] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2077.595162] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] image_cache(vi, tmp_image_ds_loc) [ 2077.595162] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2077.595636] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] vm_util.copy_virtual_disk( [ 2077.595636] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2077.595636] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] session._wait_for_task(vmdk_copy_task) [ 2077.595636] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2077.595636] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] return self.wait_for_task(task_ref) [ 2077.595636] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2077.595636] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] return evt.wait() [ 2077.595636] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2077.595636] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] result = hub.switch() [ 2077.595636] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2077.595636] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] return self.greenlet.switch() [ 2077.595636] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2077.595636] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] self.f(*self.args, **self.kw) [ 2077.596079] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2077.596079] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] raise exceptions.translate_fault(task_info.error) [ 2077.596079] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2077.596079] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Faults: ['InvalidArgument'] [ 2077.596079] env[67015]: ERROR nova.compute.manager [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] [ 2077.596079] env[67015]: DEBUG nova.compute.utils [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2077.597769] env[67015]: DEBUG nova.compute.manager [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Build of instance f9cb0b88-053a-4a6d-be59-5aa202b6a4f7 was re-scheduled: A specified parameter was not correct: fileType [ 2077.597769] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2077.598141] env[67015]: DEBUG nova.compute.manager [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2077.598327] env[67015]: DEBUG nova.compute.manager [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2077.598527] env[67015]: DEBUG nova.compute.manager [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2077.598702] env[67015]: DEBUG nova.network.neutron [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2077.917430] env[67015]: DEBUG nova.network.neutron [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2077.928473] env[67015]: INFO nova.compute.manager [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Took 0.33 seconds to deallocate network for instance. [ 2078.030586] env[67015]: INFO nova.scheduler.client.report [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Deleted allocations for instance f9cb0b88-053a-4a6d-be59-5aa202b6a4f7 [ 2078.054590] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5e878dbb-554f-48da-b7b8-fb5e3a9e2e04 tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Lock "f9cb0b88-053a-4a6d-be59-5aa202b6a4f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 686.969s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2078.054590] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5b03e291-1109-4d2c-afd7-be65f96edf4d tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Lock "f9cb0b88-053a-4a6d-be59-5aa202b6a4f7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 490.743s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2078.054590] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5b03e291-1109-4d2c-afd7-be65f96edf4d tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Acquiring lock "f9cb0b88-053a-4a6d-be59-5aa202b6a4f7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2078.054912] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5b03e291-1109-4d2c-afd7-be65f96edf4d tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Lock "f9cb0b88-053a-4a6d-be59-5aa202b6a4f7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2078.054912] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5b03e291-1109-4d2c-afd7-be65f96edf4d tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Lock "f9cb0b88-053a-4a6d-be59-5aa202b6a4f7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2078.056913] env[67015]: INFO nova.compute.manager [None req-5b03e291-1109-4d2c-afd7-be65f96edf4d tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Terminating instance [ 2078.061603] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5b03e291-1109-4d2c-afd7-be65f96edf4d tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Acquiring lock "refresh_cache-f9cb0b88-053a-4a6d-be59-5aa202b6a4f7" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2078.061603] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5b03e291-1109-4d2c-afd7-be65f96edf4d tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Acquired lock "refresh_cache-f9cb0b88-053a-4a6d-be59-5aa202b6a4f7" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2078.061603] env[67015]: DEBUG nova.network.neutron [None req-5b03e291-1109-4d2c-afd7-be65f96edf4d tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2078.102073] env[67015]: DEBUG nova.network.neutron [None req-5b03e291-1109-4d2c-afd7-be65f96edf4d tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2078.203952] env[67015]: DEBUG nova.network.neutron [None req-5b03e291-1109-4d2c-afd7-be65f96edf4d tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2078.214316] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5b03e291-1109-4d2c-afd7-be65f96edf4d tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Releasing lock "refresh_cache-f9cb0b88-053a-4a6d-be59-5aa202b6a4f7" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2078.214721] env[67015]: DEBUG nova.compute.manager [None req-5b03e291-1109-4d2c-afd7-be65f96edf4d tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Start destroying the instance on the hypervisor. 
{{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2078.214913] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5b03e291-1109-4d2c-afd7-be65f96edf4d tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2078.215438] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-578491c5-7c87-46a8-912c-bdf766b63d41 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.224868] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a979ff22-b0b6-408c-b5b9-884b213afcc7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.252986] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-5b03e291-1109-4d2c-afd7-be65f96edf4d tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f9cb0b88-053a-4a6d-be59-5aa202b6a4f7 could not be found. [ 2078.253149] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-5b03e291-1109-4d2c-afd7-be65f96edf4d tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2078.253362] env[67015]: INFO nova.compute.manager [None req-5b03e291-1109-4d2c-afd7-be65f96edf4d tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2078.253599] env[67015]: DEBUG oslo.service.loopingcall [None req-5b03e291-1109-4d2c-afd7-be65f96edf4d tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2078.253817] env[67015]: DEBUG nova.compute.manager [-] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2078.253914] env[67015]: DEBUG nova.network.neutron [-] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2078.270272] env[67015]: DEBUG nova.network.neutron [-] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2078.277595] env[67015]: DEBUG nova.network.neutron [-] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2078.285052] env[67015]: INFO nova.compute.manager [-] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] Took 0.03 seconds to deallocate network for instance. 
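The "Acquiring lock" / "acquired" / "released" DEBUG lines in this stretch come from oslo.concurrency's lockutils, which Nova uses to serialize operations such as do_terminate_instance per instance UUID and resource-tracker updates under "compute_resources". A minimal sketch of that pattern, assuming a plain in-process lock (the function name and lock names below are illustrative, not Nova's actual code):

from oslo_concurrency import lockutils

# Serialize all terminate operations for one instance; the lock name here
# mirrors the instance UUID seen in the log, purely for illustration.
@lockutils.synchronized('f9cb0b88-053a-4a6d-be59-5aa202b6a4f7')
def do_terminate_instance():
    ...  # critical section: destroy the VM, deallocate networking

# The same helper also works as a context manager:
with lockutils.lock('compute_resources'):
    ...  # e.g. resource-tracker bookkeeping

Each acquire and release is logged together with how long the caller waited and how long the lock was held, which is how timings like the 490.743s wait above are produced.
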
[ 2078.370059] env[67015]: DEBUG oslo_concurrency.lockutils [None req-5b03e291-1109-4d2c-afd7-be65f96edf4d tempest-ServerPasswordTestJSON-165492082 tempest-ServerPasswordTestJSON-165492082-project-member] Lock "f9cb0b88-053a-4a6d-be59-5aa202b6a4f7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.316s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2078.371463] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "f9cb0b88-053a-4a6d-be59-5aa202b6a4f7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 303.558s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2078.371680] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: f9cb0b88-053a-4a6d-be59-5aa202b6a4f7] During sync_power_state the instance has a pending task (deleting). Skip. [ 2078.371861] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "f9cb0b88-053a-4a6d-be59-5aa202b6a4f7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2096.531919] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2100.515178] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2101.514366] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2102.514153] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2103.509997] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2104.514689] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2104.526972] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2104.527324] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2104.527607] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2104.527854] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2104.529482] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4ab0520-4d77-4cf1-b651-3099f5d6f525 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.541371] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7237e718-77fc-476d-b49d-9d36adbce17f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.555540] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e037c9ca-8385-47a5-a838-98cca7885a10 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.561706] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e52cff-93da-4089-ab89-0e979def90c0 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.589229] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181006MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2104.589366] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2104.589554] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2104.657077] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance f7de465c-7557-41d0-b71a-ad0872c93745 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 
'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2104.657254] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 4d61f178-b532-4ddb-958f-68723d041497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2104.657383] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6daf9c76-9471-43ec-9dd6-aaa43efc391b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2104.657508] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance c1049b71-7c9b-4772-a889-fee93a62cf05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2104.657627] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 232b89d8-08a1-45af-91e6-1dc979880009 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2104.657746] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2104.657863] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance dea0f558-4d4b-41f4-9df9-c997835a628c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2104.657977] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 94751383-c885-4039-88b3-c1f6d3460e23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2104.658107] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2104.658318] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2104.658473] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2104.759799] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc0418e3-37a6-49a8-87e2-00be1526c824 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.767064] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4996070-270a-441c-a1e7-d2e66a26ced0 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.797287] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6bf7d9c-bc33-45f7-9acf-aba3dc7e2b3c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.804613] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b18803-9fb1-4da6-8b78-c933b3322bfe {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.817804] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2104.825515] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2104.839201] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2104.839385] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.250s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2105.839122] env[67015]: DEBUG oslo_service.periodic_task [None 
req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2106.510101] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2107.514058] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2107.514259] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 2111.514572] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2111.514937] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 2111.514937] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2111.535098] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2111.535260] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2111.535388] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2111.535512] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2111.535637] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Skipping network cache update for instance because it is Building. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2111.535761] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2111.535883] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2111.536011] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2111.536138] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2111.536259] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 2123.694139] env[67015]: WARNING oslo_vmware.rw_handles [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2123.694139] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2123.694139] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2123.694139] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2123.694139] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2123.694139] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 2123.694139] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2123.694139] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2123.694139] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2123.694139] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2123.694139] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2123.694139] env[67015]: ERROR oslo_vmware.rw_handles [ 2123.694791] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/c427a60a-aeae-476c-b957-f328908e9b8f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) 
fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2123.696650] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2123.696903] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Copying Virtual Disk [datastore2] vmware_temp/c427a60a-aeae-476c-b957-f328908e9b8f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/c427a60a-aeae-476c-b957-f328908e9b8f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2123.697276] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dbbeb20d-8e79-4146-9548-7a4daecd49d3 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.705404] env[67015]: DEBUG oslo_vmware.api [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Waiting for the task: (returnval){ [ 2123.705404] env[67015]: value = "task-3114587" [ 2123.705404] env[67015]: _type = "Task" [ 2123.705404] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2123.713114] env[67015]: DEBUG oslo_vmware.api [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Task: {'id': task-3114587, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2124.216191] env[67015]: DEBUG oslo_vmware.exceptions [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Fault InvalidArgument not matched. 
{{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2124.216568] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2124.217152] env[67015]: ERROR nova.compute.manager [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2124.217152] env[67015]: Faults: ['InvalidArgument'] [ 2124.217152] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Traceback (most recent call last): [ 2124.217152] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2124.217152] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] yield resources [ 2124.217152] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2124.217152] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] self.driver.spawn(context, instance, image_meta, [ 2124.217152] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2124.217152] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2124.217152] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2124.217152] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] self._fetch_image_if_missing(context, vi) [ 2124.217152] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2124.217152] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] image_cache(vi, tmp_image_ds_loc) [ 2124.217515] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2124.217515] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] vm_util.copy_virtual_disk( [ 2124.217515] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2124.217515] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] session._wait_for_task(vmdk_copy_task) [ 2124.217515] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task 
[ 2124.217515] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] return self.wait_for_task(task_ref) [ 2124.217515] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2124.217515] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] return evt.wait() [ 2124.217515] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2124.217515] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] result = hub.switch() [ 2124.217515] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2124.217515] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] return self.greenlet.switch() [ 2124.217515] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2124.217856] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] self.f(*self.args, **self.kw) [ 2124.217856] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2124.217856] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] raise exceptions.translate_fault(task_info.error) [ 2124.217856] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2124.217856] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Faults: ['InvalidArgument'] [ 2124.217856] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] [ 2124.217856] env[67015]: INFO nova.compute.manager [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Terminating instance [ 2124.219273] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2124.219355] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2124.219545] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b2be2e3-e6f7-4716-b450-7a06369c151d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.221703] 
env[67015]: DEBUG nova.compute.manager [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2124.221895] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2124.222636] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bddc6f94-e10c-4f15-8ac3-bc70f19645dd {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.229387] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2124.229590] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-64fcb2a8-5247-41bf-8185-82039b4b7491 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.231691] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2124.232045] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2124.232778] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edea8691-23e4-4b7b-805a-303d208173f9 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.237133] env[67015]: DEBUG oslo_vmware.api [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Waiting for the task: (returnval){ [ 2124.237133] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]5222d25d-6eee-4ebe-0946-fdf2830a85d3" [ 2124.237133] env[67015]: _type = "Task" [ 2124.237133] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2124.245026] env[67015]: DEBUG oslo_vmware.api [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]5222d25d-6eee-4ebe-0946-fdf2830a85d3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2124.299417] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2124.299643] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2124.299823] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Deleting the datastore file [datastore2] f7de465c-7557-41d0-b71a-ad0872c93745 {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2124.300110] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f187ffe-8338-4280-a2a3-c19d4cd03e8a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.306810] env[67015]: DEBUG oslo_vmware.api [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Waiting for the task: (returnval){ [ 2124.306810] env[67015]: value = "task-3114589" [ 2124.306810] env[67015]: _type = "Task" [ 2124.306810] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2124.314738] env[67015]: DEBUG oslo_vmware.api [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Task: {'id': task-3114589, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2124.748765] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2124.749060] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Creating directory with path [datastore2] vmware_temp/69d08b01-5dc1-4260-8506-8c4e034eb61b/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2124.749310] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-42d8a5ba-ab9d-4e44-9a06-1a31080d893c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.760305] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Created directory with path [datastore2] vmware_temp/69d08b01-5dc1-4260-8506-8c4e034eb61b/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2124.760496] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Fetch image to [datastore2] vmware_temp/69d08b01-5dc1-4260-8506-8c4e034eb61b/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2124.760836] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/69d08b01-5dc1-4260-8506-8c4e034eb61b/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2124.761408] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c23aba7-3886-40ba-abc3-1d9f6b9ca512 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.767877] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc1d08b-7213-4ec3-846a-cfbfc8132a51 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.776545] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7df317b1-ee8f-4cd1-a848-cddb751b19d3 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.806363] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c48d43c-c25c-47ca-82d8-cc15b1c04e10 {{(pid=67015) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.817024] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-97402604-2ec6-499b-b6da-d64269ec7635 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.818666] env[67015]: DEBUG oslo_vmware.api [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Task: {'id': task-3114589, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078627} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2124.818904] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2124.819096] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2124.819272] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2124.819444] env[67015]: INFO nova.compute.manager [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Took 0.60 seconds to destroy the instance on the hypervisor. 
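The CopyVirtualDisk_Task and DeleteDatastoreFile_Task records above follow oslo.vmware's generic task-polling pattern: the driver invokes a vCenter task, then wait_for_task() polls it in a looping call until it completes or raises a translated fault, which is exactly where the "fileType" / InvalidArgument error in this log surfaces. A minimal sketch, assuming an already-created VMwareAPISession bound to the variable session (the variable names and the handling branch are illustrative):

from oslo_vmware import exceptions as vexc

try:
    # vmdk_copy_task is the task reference returned by an earlier
    # invoke_api(..., 'CopyVirtualDisk_Task', ...) call
    session.wait_for_task(vmdk_copy_task)
except vexc.VimFaultException as e:
    # "A specified parameter was not correct: fileType" with
    # Faults: ['InvalidArgument'] is raised from _poll_task like this
    if 'InvalidArgument' in e.fault_list:
        ...  # e.g. clean up the temp image before rescheduling the build
    raise
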
[ 2124.821500] env[67015]: DEBUG nova.compute.claims [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2124.821673] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2124.821897] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2124.841076] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2124.900544] env[67015]: DEBUG oslo_vmware.rw_handles [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/69d08b01-5dc1-4260-8506-8c4e034eb61b/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2124.961956] env[67015]: DEBUG oslo_vmware.rw_handles [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2124.962159] env[67015]: DEBUG oslo_vmware.rw_handles [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/69d08b01-5dc1-4260-8506-8c4e034eb61b/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2125.021723] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d17a247-c675-444d-a459-ead0099a6a2a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.030817] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc6bbc3c-34bc-4f09-bc44-0151794a2234 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.059749] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe5eac4-fd9f-4414-b3d4-604e2e10e519 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.066693] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5e1accc-ba12-4caf-aedb-8644f2978d28 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.079287] env[67015]: DEBUG nova.compute.provider_tree [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2125.087978] env[67015]: DEBUG nova.scheduler.client.report [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2125.100815] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.279s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2125.101396] env[67015]: ERROR nova.compute.manager [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2125.101396] env[67015]: Faults: ['InvalidArgument'] [ 2125.101396] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Traceback (most recent call last): [ 2125.101396] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2125.101396] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] 
self.driver.spawn(context, instance, image_meta, [ 2125.101396] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2125.101396] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2125.101396] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2125.101396] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] self._fetch_image_if_missing(context, vi) [ 2125.101396] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2125.101396] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] image_cache(vi, tmp_image_ds_loc) [ 2125.101396] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2125.101779] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] vm_util.copy_virtual_disk( [ 2125.101779] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2125.101779] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] session._wait_for_task(vmdk_copy_task) [ 2125.101779] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2125.101779] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] return self.wait_for_task(task_ref) [ 2125.101779] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2125.101779] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] return evt.wait() [ 2125.101779] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2125.101779] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] result = hub.switch() [ 2125.101779] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2125.101779] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] return self.greenlet.switch() [ 2125.101779] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2125.101779] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] self.f(*self.args, **self.kw) [ 2125.102108] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task 
[ 2125.102108] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] raise exceptions.translate_fault(task_info.error) [ 2125.102108] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2125.102108] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Faults: ['InvalidArgument'] [ 2125.102108] env[67015]: ERROR nova.compute.manager [instance: f7de465c-7557-41d0-b71a-ad0872c93745] [ 2125.102108] env[67015]: DEBUG nova.compute.utils [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2125.103404] env[67015]: DEBUG nova.compute.manager [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Build of instance f7de465c-7557-41d0-b71a-ad0872c93745 was re-scheduled: A specified parameter was not correct: fileType [ 2125.103404] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2125.103787] env[67015]: DEBUG nova.compute.manager [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2125.103968] env[67015]: DEBUG nova.compute.manager [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2125.104158] env[67015]: DEBUG nova.compute.manager [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2125.104356] env[67015]: DEBUG nova.network.neutron [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2125.392788] env[67015]: DEBUG nova.network.neutron [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2125.403971] env[67015]: INFO nova.compute.manager [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Took 0.30 seconds to deallocate network for instance. 
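The traceback above is the recurring failure in this run: _cache_sparse_image calls vm_util.copy_virtual_disk, which submits a CopyVirtualDisk_Task and blocks in oslo.vmware's wait_for_task; when vCenter reports the task as errored, _poll_task translates the TaskInfo error into the VimFaultException with Faults: ['InvalidArgument'] seen here. Below is a minimal sketch of that submit-and-wait pattern against oslo.vmware's public session API; the host, credentials, and datastore paths are placeholders, not values taken from this log.

    # Minimal sketch, not Nova's code: submit a CopyVirtualDisk_Task and wait.
    # The host, credentials, and both VMDK paths below are hypothetical.
    from oslo_vmware import api, exceptions

    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',       # hypothetical vCenter login
        api_retry_count=10, task_poll_interval=0.5)

    vdm = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', vdm,
        sourceName='[datastore2] vmware_temp/example/tmp-sparse.vmdk',
        destName='[datastore2] vmware_temp/example/example.vmdk')
    try:
        session.wait_for_task(task)     # polls TaskInfo until success or error
    except exceptions.VimFaultException as e:
        print(e.msg, e.fault_list)      # here: "A specified parameter was not
                                        # correct: fileType", ['InvalidArgument']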
[ 2125.490227] env[67015]: INFO nova.scheduler.client.report [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Deleted allocations for instance f7de465c-7557-41d0-b71a-ad0872c93745 [ 2125.512474] env[67015]: DEBUG oslo_concurrency.lockutils [None req-f572488f-4057-42e1-9a8c-6a79d41bf7e6 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "f7de465c-7557-41d0-b71a-ad0872c93745" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 568.739s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2125.512697] env[67015]: DEBUG oslo_concurrency.lockutils [None req-18d07193-75b1-4200-9c85-be29f82e5e21 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "f7de465c-7557-41d0-b71a-ad0872c93745" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 372.975s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2125.512932] env[67015]: DEBUG oslo_concurrency.lockutils [None req-18d07193-75b1-4200-9c85-be29f82e5e21 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Acquiring lock "f7de465c-7557-41d0-b71a-ad0872c93745-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2125.513152] env[67015]: DEBUG oslo_concurrency.lockutils [None req-18d07193-75b1-4200-9c85-be29f82e5e21 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "f7de465c-7557-41d0-b71a-ad0872c93745-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2125.513319] env[67015]: DEBUG oslo_concurrency.lockutils [None req-18d07193-75b1-4200-9c85-be29f82e5e21 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "f7de465c-7557-41d0-b71a-ad0872c93745-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2125.515390] env[67015]: INFO nova.compute.manager [None req-18d07193-75b1-4200-9c85-be29f82e5e21 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Terminating instance [ 2125.517182] env[67015]: DEBUG nova.compute.manager [None req-18d07193-75b1-4200-9c85-be29f82e5e21 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Start destroying the instance on the hypervisor. 
{{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2125.517400] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-18d07193-75b1-4200-9c85-be29f82e5e21 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2125.518040] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1d533847-d54a-4a6f-9149-9e4d4fd69cea {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.527285] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c72cf13-40ec-43b5-af98-ecbf41c755cf {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.557213] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-18d07193-75b1-4200-9c85-be29f82e5e21 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f7de465c-7557-41d0-b71a-ad0872c93745 could not be found. [ 2125.557687] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-18d07193-75b1-4200-9c85-be29f82e5e21 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2125.557687] env[67015]: INFO nova.compute.manager [None req-18d07193-75b1-4200-9c85-be29f82e5e21 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2125.557873] env[67015]: DEBUG oslo.service.loopingcall [None req-18d07193-75b1-4200-9c85-be29f82e5e21 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2125.558451] env[67015]: DEBUG nova.compute.manager [-] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2125.558451] env[67015]: DEBUG nova.network.neutron [-] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2125.582399] env[67015]: DEBUG nova.network.neutron [-] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2125.590375] env[67015]: INFO nova.compute.manager [-] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] Took 0.03 seconds to deallocate network for instance. 
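The Acquiring/acquired/released lines above come from oslo.concurrency's lockutils, which Nova uses to serialize operations on a single instance UUID: the terminate request reported "waited 372.975s" because the build path held the same "f7de465c-7557-41d0-b71a-ad0872c93745" lock until _locked_do_build_and_run_instance finished. A minimal sketch of that decorator pattern, with an illustrative function body rather than Nova's implementation:

    # Minimal sketch of the oslo.concurrency pattern behind the lock lines
    # above; the function body is illustrative, not Nova's code.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('f7de465c-7557-41d0-b71a-ad0872c93745')
    def do_terminate_instance():
        # While another caller (here, the build path) holds this
        # instance-UUID lock, this call blocks until release, which is
        # how a long "waited ...s" figure arises in the log.
        pass

    do_terminate_instance()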
[ 2125.675886] env[67015]: DEBUG oslo_concurrency.lockutils [None req-18d07193-75b1-4200-9c85-be29f82e5e21 tempest-ImagesTestJSON-284644961 tempest-ImagesTestJSON-284644961-project-member] Lock "f7de465c-7557-41d0-b71a-ad0872c93745" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.163s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2125.676946] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "f7de465c-7557-41d0-b71a-ad0872c93745" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 350.863s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2125.677142] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: f7de465c-7557-41d0-b71a-ad0872c93745] During sync_power_state the instance has a pending task (deleting). Skip. [ 2125.677320] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "f7de465c-7557-41d0-b71a-ad0872c93745" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2149.134413] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9b7c153a-c52a-4296-8812-a28193613119 tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Acquiring lock "dea0f558-4d4b-41f4-9df9-c997835a628c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2157.513708] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2161.514942] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2162.514524] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2164.515412] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2164.515715] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2164.527707] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] 
Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2164.527927] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2164.528117] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2164.528280] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2164.529409] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b17b0e2d-60d3-407f-951a-f5dce736ea28 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.538158] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5279748f-290a-4b83-8cb2-d23291c06f9b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.552867] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef120288-f5a1-4995-8192-1098ef500b8d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.558939] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db07088-0e3a-4550-8eb7-74bd3719f5d5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.586799] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181066MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2164.586937] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2164.587148] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2164.657542] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 
4d61f178-b532-4ddb-958f-68723d041497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2164.657718] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6daf9c76-9471-43ec-9dd6-aaa43efc391b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2164.657848] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance c1049b71-7c9b-4772-a889-fee93a62cf05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2164.657970] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 232b89d8-08a1-45af-91e6-1dc979880009 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2164.658133] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2164.658244] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance dea0f558-4d4b-41f4-9df9-c997835a628c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2164.658371] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 94751383-c885-4039-88b3-c1f6d3460e23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2164.658491] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2164.658674] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2164.658812] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2164.753998] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf133cd-7cc1-40f6-b8b9-403b4e63062a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.761621] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdef3d68-332c-4bfa-ac6c-0ff4b7ff3c67 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.792341] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc580d10-e1ef-40c5-99d3-83f3af60d3b2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.799658] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0621fe91-6058-48e6-bb7a-f81233d87ba5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.812381] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2164.820512] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2164.833883] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2164.834072] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.247s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2165.828126] env[67015]: DEBUG oslo_service.periodic_task [None 
req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2167.514653] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2169.514219] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2169.514588] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 2171.031190] env[67015]: WARNING oslo_vmware.rw_handles [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2171.031190] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2171.031190] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2171.031190] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2171.031190] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2171.031190] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 2171.031190] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2171.031190] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2171.031190] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2171.031190] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2171.031190] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2171.031190] env[67015]: ERROR oslo_vmware.rw_handles [ 2171.031751] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/69d08b01-5dc1-4260-8506-8c4e034eb61b/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2171.033554] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2171.033812] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 
tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Copying Virtual Disk [datastore2] vmware_temp/69d08b01-5dc1-4260-8506-8c4e034eb61b/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/69d08b01-5dc1-4260-8506-8c4e034eb61b/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2171.034114] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-145541a0-962f-441a-b5f6-27f0b9e02894 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.043456] env[67015]: DEBUG oslo_vmware.api [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Waiting for the task: (returnval){ [ 2171.043456] env[67015]: value = "task-3114590" [ 2171.043456] env[67015]: _type = "Task" [ 2171.043456] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2171.051178] env[67015]: DEBUG oslo_vmware.api [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Task: {'id': task-3114590, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.514529] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2171.514711] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 2171.514832] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2171.533044] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2171.533155] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2171.533282] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2171.533414] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Skipping network cache update for instance because it is Building. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2171.533543] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2171.533668] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2171.533790] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2171.533911] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2171.534046] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 2171.553471] env[67015]: DEBUG oslo_vmware.exceptions [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Fault InvalidArgument not matched. 
{{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2171.553734] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2171.554310] env[67015]: ERROR nova.compute.manager [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2171.554310] env[67015]: Faults: ['InvalidArgument'] [ 2171.554310] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] Traceback (most recent call last): [ 2171.554310] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2171.554310] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] yield resources [ 2171.554310] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2171.554310] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] self.driver.spawn(context, instance, image_meta, [ 2171.554310] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2171.554310] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2171.554310] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2171.554310] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] self._fetch_image_if_missing(context, vi) [ 2171.554310] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2171.554310] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] image_cache(vi, tmp_image_ds_loc) [ 2171.554620] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2171.554620] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] vm_util.copy_virtual_disk( [ 2171.554620] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2171.554620] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] session._wait_for_task(vmdk_copy_task) [ 2171.554620] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 2171.554620] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] return self.wait_for_task(task_ref) [ 2171.554620] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2171.554620] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] return evt.wait() [ 2171.554620] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2171.554620] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] result = hub.switch() [ 2171.554620] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2171.554620] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] return self.greenlet.switch() [ 2171.554620] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2171.554900] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] self.f(*self.args, **self.kw) [ 2171.554900] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2171.554900] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] raise exceptions.translate_fault(task_info.error) [ 2171.554900] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2171.554900] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] Faults: ['InvalidArgument'] [ 2171.554900] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] [ 2171.554900] env[67015]: INFO nova.compute.manager [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Terminating instance [ 2171.556847] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2171.556847] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2171.556847] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-495cc599-62ab-478d-82e5-ce005381c9aa {{(pid=67015) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.559006] env[67015]: DEBUG nova.compute.manager [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2171.559209] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2171.559920] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ec8db0b-c101-46a5-9a8f-3e977dbb1859 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.566753] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2171.566970] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-237cfb40-8d1c-4ea4-945a-e126f002883d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.569083] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2171.569296] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2171.570675] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-677d8613-c012-4c93-8b94-0568fa901ab4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.575397] env[67015]: DEBUG oslo_vmware.api [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Waiting for the task: (returnval){ [ 2171.575397] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]523fd65e-8b8f-9ceb-d23d-2997efe19e01" [ 2171.575397] env[67015]: _type = "Task" [ 2171.575397] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2171.582298] env[67015]: DEBUG oslo_vmware.api [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]523fd65e-8b8f-9ceb-d23d-2997efe19e01, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.633228] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2171.633440] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2171.633627] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Deleting the datastore file [datastore2] 4d61f178-b532-4ddb-958f-68723d041497 {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2171.633884] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0839c1e4-b115-4f16-9436-5d983af34560 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.640078] env[67015]: DEBUG oslo_vmware.api [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Waiting for the task: (returnval){ [ 2171.640078] env[67015]: value = "task-3114592" [ 2171.640078] env[67015]: _type = "Task" [ 2171.640078] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2171.647219] env[67015]: DEBUG oslo_vmware.api [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Task: {'id': task-3114592, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.085575] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2172.085852] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Creating directory with path [datastore2] vmware_temp/8ccbfae5-eb9a-4280-900a-c00a891bcb73/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2172.086084] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-decb4912-3773-4bab-bfde-a7ab3e4d0ad1 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.098093] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Created directory with path [datastore2] vmware_temp/8ccbfae5-eb9a-4280-900a-c00a891bcb73/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2172.098297] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Fetch image to [datastore2] vmware_temp/8ccbfae5-eb9a-4280-900a-c00a891bcb73/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2172.098471] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/8ccbfae5-eb9a-4280-900a-c00a891bcb73/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2172.099222] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1626a058-8a9b-4f01-979a-9e6fb6a0f2a2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.105581] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd389c2-05b5-4d67-91fa-74cabcdc80e4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.115322] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3445216-4093-44ee-bdf5-486e7823dc07 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.147588] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72464e1e-1c3d-489d-88cc-9c58b7c16e5d {{(pid=67015) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.153977] env[67015]: DEBUG oslo_vmware.api [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Task: {'id': task-3114592, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078018} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2172.155314] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2172.155512] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2172.155684] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2172.155855] env[67015]: INFO nova.compute.manager [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Took 0.60 seconds to destroy the instance on the hypervisor. 
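The image-cache transfers in this log go through vCenter's /folder datastore HTTP endpoint via oslo.vmware's rw_handles, and close() is where the handle finally reads the server's HTTP response, which is why the RemoteDisconnected warning earlier in this section surfaces from the close path in rw_handles.py. Below is a minimal sketch of the write handle whose creation is logged just after this point; the host, cookie list, and temp path are placeholders, and only the 21318656-byte size mirrors the upload in this log.

    # Minimal sketch, assuming oslo.vmware's FileWriteHandle; the host,
    # cookie list, and temp path are placeholders, not values from this run.
    from oslo_vmware import rw_handles

    handle = rw_handles.FileWriteHandle(
        'esx.example.test', 443,         # hypothetical ESX host and port
        'ha-datacenter', 'datastore2',   # map to dcPath=...&dsName=... in the URL
        [],                              # session cookies from the vCenter login
        'vmware_temp/example/tmp-sparse.vmdk',
        21318656)                        # file size is declared up front
    handle.write(b'...image chunk...')   # called repeatedly while streaming
    handle.close()                       # reads the HTTP response; a dropped
                                         # connection here raises the
                                         # RemoteDisconnected logged earlier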
[ 2172.157606] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7938cb93-0432-4e00-ab10-6827a8ce5c9a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.159382] env[67015]: DEBUG nova.compute.claims [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2172.159552] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2172.159761] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2172.180999] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2172.301527] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-465cd685-606d-472e-87c7-3b3334a73be6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.308775] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5525064-b7b7-4f8c-82e5-7dfb91e7f5c7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.312340] env[67015]: DEBUG oslo_vmware.rw_handles [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8ccbfae5-eb9a-4280-900a-c00a891bcb73/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2172.396464] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf4e414-5ed0-41f9-8323-5ac170960ae5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.399357] env[67015]: DEBUG oslo_vmware.rw_handles [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Completed reading data from the image iterator. 
{{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2172.399531] env[67015]: DEBUG oslo_vmware.rw_handles [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8ccbfae5-eb9a-4280-900a-c00a891bcb73/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2172.404799] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93abddea-a5a0-40c8-b34d-c80f95ee551a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.417928] env[67015]: DEBUG nova.compute.provider_tree [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2172.426521] env[67015]: DEBUG nova.scheduler.client.report [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2172.439775] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.280s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2172.440335] env[67015]: ERROR nova.compute.manager [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2172.440335] env[67015]: Faults: ['InvalidArgument'] [ 2172.440335] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] Traceback (most recent call last): [ 2172.440335] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2172.440335] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] self.driver.spawn(context, instance, image_meta, [ 2172.440335] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2172.440335] env[67015]: ERROR nova.compute.manager 
[instance: 4d61f178-b532-4ddb-958f-68723d041497] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2172.440335] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2172.440335] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] self._fetch_image_if_missing(context, vi) [ 2172.440335] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2172.440335] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] image_cache(vi, tmp_image_ds_loc) [ 2172.440335] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2172.440611] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] vm_util.copy_virtual_disk( [ 2172.440611] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2172.440611] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] session._wait_for_task(vmdk_copy_task) [ 2172.440611] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2172.440611] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] return self.wait_for_task(task_ref) [ 2172.440611] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2172.440611] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] return evt.wait() [ 2172.440611] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2172.440611] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] result = hub.switch() [ 2172.440611] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2172.440611] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] return self.greenlet.switch() [ 2172.440611] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2172.440611] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] self.f(*self.args, **self.kw) [ 2172.440884] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2172.440884] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] raise exceptions.translate_fault(task_info.error) [ 2172.440884] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2172.440884] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] Faults: ['InvalidArgument'] [ 2172.440884] env[67015]: ERROR nova.compute.manager [instance: 4d61f178-b532-4ddb-958f-68723d041497] [ 2172.441153] env[67015]: DEBUG nova.compute.utils [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2172.442493] env[67015]: DEBUG nova.compute.manager [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Build of instance 4d61f178-b532-4ddb-958f-68723d041497 was re-scheduled: A specified parameter was not correct: fileType [ 2172.442493] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2172.442868] env[67015]: DEBUG nova.compute.manager [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2172.443062] env[67015]: DEBUG nova.compute.manager [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2172.443325] env[67015]: DEBUG nova.compute.manager [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2172.443517] env[67015]: DEBUG nova.network.neutron [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2172.743041] env[67015]: DEBUG nova.network.neutron [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2172.755245] env[67015]: INFO nova.compute.manager [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Took 0.31 seconds to deallocate network for instance. 
[ 2172.846735] env[67015]: INFO nova.scheduler.client.report [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Deleted allocations for instance 4d61f178-b532-4ddb-958f-68723d041497 [ 2172.869715] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc542c51-c2ba-4f43-b2ea-e2641c24cde6 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "4d61f178-b532-4ddb-958f-68723d041497" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 576.626s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2172.870019] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "4d61f178-b532-4ddb-958f-68723d041497" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 398.056s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2172.870271] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 4d61f178-b532-4ddb-958f-68723d041497] During sync_power_state the instance has a pending task (spawning). Skip. [ 2172.870451] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "4d61f178-b532-4ddb-958f-68723d041497" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2172.870983] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ffe77df2-b44e-4b73-bc36-e12a238eb823 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "4d61f178-b532-4ddb-958f-68723d041497" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 380.579s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2172.871253] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ffe77df2-b44e-4b73-bc36-e12a238eb823 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "4d61f178-b532-4ddb-958f-68723d041497-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2172.871477] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ffe77df2-b44e-4b73-bc36-e12a238eb823 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "4d61f178-b532-4ddb-958f-68723d041497-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2172.871672] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ffe77df2-b44e-4b73-bc36-e12a238eb823 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "4d61f178-b532-4ddb-958f-68723d041497-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67015) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2172.873923] env[67015]: INFO nova.compute.manager [None req-ffe77df2-b44e-4b73-bc36-e12a238eb823 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Terminating instance [ 2172.875858] env[67015]: DEBUG nova.compute.manager [None req-ffe77df2-b44e-4b73-bc36-e12a238eb823 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2172.876097] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ffe77df2-b44e-4b73-bc36-e12a238eb823 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2172.876681] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-05856ecd-57f8-49e0-ad9f-f896c52316c2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.886687] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b272fb5-6669-44c9-9a6f-28b926f48e09 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.914840] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-ffe77df2-b44e-4b73-bc36-e12a238eb823 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4d61f178-b532-4ddb-958f-68723d041497 could not be found. [ 2172.915066] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ffe77df2-b44e-4b73-bc36-e12a238eb823 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2172.915287] env[67015]: INFO nova.compute.manager [None req-ffe77df2-b44e-4b73-bc36-e12a238eb823 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2172.915540] env[67015]: DEBUG oslo.service.loopingcall [None req-ffe77df2-b44e-4b73-bc36-e12a238eb823 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2172.915758] env[67015]: DEBUG nova.compute.manager [-] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2172.915857] env[67015]: DEBUG nova.network.neutron [-] [instance: 4d61f178-b532-4ddb-958f-68723d041497] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2172.938102] env[67015]: DEBUG nova.network.neutron [-] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2172.946669] env[67015]: INFO nova.compute.manager [-] [instance: 4d61f178-b532-4ddb-958f-68723d041497] Took 0.03 seconds to deallocate network for instance. [ 2173.028027] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ffe77df2-b44e-4b73-bc36-e12a238eb823 tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Lock "4d61f178-b532-4ddb-958f-68723d041497" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.157s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2181.605537] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Acquiring lock "6d85e398-6c75-4311-8e23-32d811e211f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2181.605865] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Lock "6d85e398-6c75-4311-8e23-32d811e211f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2181.616966] env[67015]: DEBUG nova.compute.manager [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Starting instance... 
{{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2181.670587] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2181.670845] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2181.672436] env[67015]: INFO nova.compute.claims [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2181.860087] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c1856d2-f276-4aa8-baa6-0be58ff271a1 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.869189] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a3c396-e37b-4f51-91a6-c3f3f1e344f7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.900707] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc40709b-1ec4-485e-b66e-03b49c4b1976 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.908405] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d29c39c-48fb-4fe6-ada7-fcbe9b69af1a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.921821] env[67015]: DEBUG nova.compute.provider_tree [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2181.931204] env[67015]: DEBUG nova.scheduler.client.report [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2181.945876] env[67015]: DEBUG 
oslo_concurrency.lockutils [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.275s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2181.946502] env[67015]: DEBUG nova.compute.manager [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2181.984182] env[67015]: DEBUG nova.compute.utils [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2181.986039] env[67015]: DEBUG nova.compute.manager [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2181.986164] env[67015]: DEBUG nova.network.neutron [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2181.996135] env[67015]: DEBUG nova.compute.manager [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2182.044890] env[67015]: DEBUG nova.policy [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b1f8e67ce7944fc98f04fabf70483176', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '46faf87b244440448f57a88bcd8a46aa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 2182.084648] env[67015]: DEBUG nova.compute.manager [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Start spawning the instance on the hypervisor. 
{{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2182.109566] env[67015]: DEBUG nova.virt.hardware [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2182.109807] env[67015]: DEBUG nova.virt.hardware [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2182.109966] env[67015]: DEBUG nova.virt.hardware [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2182.110165] env[67015]: DEBUG nova.virt.hardware [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2182.110317] env[67015]: DEBUG nova.virt.hardware [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2182.110469] env[67015]: DEBUG nova.virt.hardware [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2182.110710] env[67015]: DEBUG nova.virt.hardware [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2182.110917] env[67015]: DEBUG nova.virt.hardware [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 2182.111112] env[67015]: DEBUG nova.virt.hardware [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2182.111281] env[67015]: DEBUG nova.virt.hardware [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2182.111454] env[67015]: DEBUG nova.virt.hardware [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2182.112309] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ab2565-0101-433e-afaa-35f9ad2b62b4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.120509] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9ef962-74d5-4ebf-a6db-5c532aecf094 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.358466] env[67015]: DEBUG nova.network.neutron [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Successfully created port: a03bda3d-12de-43cf-b809-92a684bb8b00 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2182.944845] env[67015]: DEBUG nova.compute.manager [req-8c4a186f-d89b-464a-a167-f87e31dda8ce req-7b73ef6e-3406-4888-b3ce-51f7a2989e69 service nova] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Received event network-vif-plugged-a03bda3d-12de-43cf-b809-92a684bb8b00 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 2182.945118] env[67015]: DEBUG oslo_concurrency.lockutils [req-8c4a186f-d89b-464a-a167-f87e31dda8ce req-7b73ef6e-3406-4888-b3ce-51f7a2989e69 service nova] Acquiring lock "6d85e398-6c75-4311-8e23-32d811e211f6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2182.945335] env[67015]: DEBUG oslo_concurrency.lockutils [req-8c4a186f-d89b-464a-a167-f87e31dda8ce req-7b73ef6e-3406-4888-b3ce-51f7a2989e69 service nova] Lock "6d85e398-6c75-4311-8e23-32d811e211f6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2182.945502] env[67015]: DEBUG oslo_concurrency.lockutils [req-8c4a186f-d89b-464a-a167-f87e31dda8ce req-7b73ef6e-3406-4888-b3ce-51f7a2989e69 service nova] Lock "6d85e398-6c75-4311-8e23-32d811e211f6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67015) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2182.945676] env[67015]: DEBUG nova.compute.manager [req-8c4a186f-d89b-464a-a167-f87e31dda8ce req-7b73ef6e-3406-4888-b3ce-51f7a2989e69 service nova] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] No waiting events found dispatching network-vif-plugged-a03bda3d-12de-43cf-b809-92a684bb8b00 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2182.945845] env[67015]: WARNING nova.compute.manager [req-8c4a186f-d89b-464a-a167-f87e31dda8ce req-7b73ef6e-3406-4888-b3ce-51f7a2989e69 service nova] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Received unexpected event network-vif-plugged-a03bda3d-12de-43cf-b809-92a684bb8b00 for instance with vm_state building and task_state spawning. [ 2182.955362] env[67015]: DEBUG nova.network.neutron [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Successfully updated port: a03bda3d-12de-43cf-b809-92a684bb8b00 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2182.966600] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Acquiring lock "refresh_cache-6d85e398-6c75-4311-8e23-32d811e211f6" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2182.966746] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Acquired lock "refresh_cache-6d85e398-6c75-4311-8e23-32d811e211f6" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2182.966893] env[67015]: DEBUG nova.network.neutron [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2183.010406] env[67015]: DEBUG nova.network.neutron [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Instance cache missing network info. 
{{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2183.177469] env[67015]: DEBUG nova.network.neutron [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Updating instance_info_cache with network_info: [{"id": "a03bda3d-12de-43cf-b809-92a684bb8b00", "address": "fa:16:3e:e2:29:f7", "network": {"id": "f654e6d4-90a1-420e-9ca2-c85fb055db0b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-662388528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46faf87b244440448f57a88bcd8a46aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa03bda3d-12", "ovs_interfaceid": "a03bda3d-12de-43cf-b809-92a684bb8b00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2183.187991] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Releasing lock "refresh_cache-6d85e398-6c75-4311-8e23-32d811e211f6" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2183.188278] env[67015]: DEBUG nova.compute.manager [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Instance network_info: |[{"id": "a03bda3d-12de-43cf-b809-92a684bb8b00", "address": "fa:16:3e:e2:29:f7", "network": {"id": "f654e6d4-90a1-420e-9ca2-c85fb055db0b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-662388528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46faf87b244440448f57a88bcd8a46aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa03bda3d-12", "ovs_interfaceid": "a03bda3d-12de-43cf-b809-92a684bb8b00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2183.188687] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:29:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ddfb706a-add1-4e16-9ac4-d20b16a1df6d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a03bda3d-12de-43cf-b809-92a684bb8b00', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2183.196100] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Creating folder: Project (46faf87b244440448f57a88bcd8a46aa). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2183.196567] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-363834fe-949d-456c-a278-35ae46f31e4a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.207148] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Created folder: Project (46faf87b244440448f57a88bcd8a46aa) in parent group-v623108. [ 2183.207328] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Creating folder: Instances. Parent ref: group-v623231. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2183.207537] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3683c7f6-f335-49ad-a686-4ed637b441d5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.215524] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Created folder: Instances in parent group-v623231. [ 2183.215744] env[67015]: DEBUG oslo.service.loopingcall [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2183.215915] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2183.216111] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2dd44310-b5a7-46a6-9fda-9738785b1aae {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.233652] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2183.233652] env[67015]: value = "task-3114595" [ 2183.233652] env[67015]: _type = "Task" [ 2183.233652] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2183.240609] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114595, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.743949] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114595, 'name': CreateVM_Task, 'duration_secs': 0.260377} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2183.744159] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2183.744846] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2183.745040] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2183.745414] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2183.745697] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47882a4c-a597-4b96-b9f0-a4eea9cf27e4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.749962] env[67015]: DEBUG oslo_vmware.api [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Waiting for the task: (returnval){ [ 2183.749962] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52a97ff4-8a32-3de9-89a9-093f94469e80" [ 2183.749962] env[67015]: _type = "Task" [ 2183.749962] env[67015]: } to 
complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2183.758308] env[67015]: DEBUG oslo_vmware.api [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52a97ff4-8a32-3de9-89a9-093f94469e80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.120736] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7242164b-4595-46b2-93db-c06e46db842d tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquiring lock "94751383-c885-4039-88b3-c1f6d3460e23" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2184.260281] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2184.260548] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2184.260764] env[67015]: DEBUG oslo_concurrency.lockutils [None req-1f6b2ed2-686b-4d46-b853-10568ca728df tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2184.969202] env[67015]: DEBUG nova.compute.manager [req-53bfe1a6-1585-47ab-8b5a-9ff395b6c102 req-eec74285-4cd5-4f0b-8cf4-bc42bdea47e5 service nova] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Received event network-changed-a03bda3d-12de-43cf-b809-92a684bb8b00 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 2184.969202] env[67015]: DEBUG nova.compute.manager [req-53bfe1a6-1585-47ab-8b5a-9ff395b6c102 req-eec74285-4cd5-4f0b-8cf4-bc42bdea47e5 service nova] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Refreshing instance network info cache due to event network-changed-a03bda3d-12de-43cf-b809-92a684bb8b00. 
{{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 2184.969736] env[67015]: DEBUG oslo_concurrency.lockutils [req-53bfe1a6-1585-47ab-8b5a-9ff395b6c102 req-eec74285-4cd5-4f0b-8cf4-bc42bdea47e5 service nova] Acquiring lock "refresh_cache-6d85e398-6c75-4311-8e23-32d811e211f6" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2184.969893] env[67015]: DEBUG oslo_concurrency.lockutils [req-53bfe1a6-1585-47ab-8b5a-9ff395b6c102 req-eec74285-4cd5-4f0b-8cf4-bc42bdea47e5 service nova] Acquired lock "refresh_cache-6d85e398-6c75-4311-8e23-32d811e211f6" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2184.970072] env[67015]: DEBUG nova.network.neutron [req-53bfe1a6-1585-47ab-8b5a-9ff395b6c102 req-eec74285-4cd5-4f0b-8cf4-bc42bdea47e5 service nova] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Refreshing network info cache for port a03bda3d-12de-43cf-b809-92a684bb8b00 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2185.217180] env[67015]: DEBUG nova.network.neutron [req-53bfe1a6-1585-47ab-8b5a-9ff395b6c102 req-eec74285-4cd5-4f0b-8cf4-bc42bdea47e5 service nova] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Updated VIF entry in instance network info cache for port a03bda3d-12de-43cf-b809-92a684bb8b00. {{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2185.217541] env[67015]: DEBUG nova.network.neutron [req-53bfe1a6-1585-47ab-8b5a-9ff395b6c102 req-eec74285-4cd5-4f0b-8cf4-bc42bdea47e5 service nova] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Updating instance_info_cache with network_info: [{"id": "a03bda3d-12de-43cf-b809-92a684bb8b00", "address": "fa:16:3e:e2:29:f7", "network": {"id": "f654e6d4-90a1-420e-9ca2-c85fb055db0b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-662388528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46faf87b244440448f57a88bcd8a46aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa03bda3d-12", "ovs_interfaceid": "a03bda3d-12de-43cf-b809-92a684bb8b00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2185.226782] env[67015]: DEBUG oslo_concurrency.lockutils [req-53bfe1a6-1585-47ab-8b5a-9ff395b6c102 req-eec74285-4cd5-4f0b-8cf4-bc42bdea47e5 service nova] Releasing lock "refresh_cache-6d85e398-6c75-4311-8e23-32d811e211f6" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2214.179115] env[67015]: DEBUG oslo_concurrency.lockutils [None req-736f21a3-7365-41f4-8383-dc0cea72456d tempest-ServerGroupTestJSON-1686141157 tempest-ServerGroupTestJSON-1686141157-project-member] 
Acquiring lock "6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2217.515007] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2219.720360] env[67015]: WARNING oslo_vmware.rw_handles [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2219.720360] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2219.720360] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2219.720360] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2219.720360] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2219.720360] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 2219.720360] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2219.720360] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2219.720360] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2219.720360] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2219.720360] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2219.720360] env[67015]: ERROR oslo_vmware.rw_handles [ 2219.720977] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/8ccbfae5-eb9a-4280-900a-c00a891bcb73/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2219.722639] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2219.722878] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Copying Virtual Disk [datastore2] vmware_temp/8ccbfae5-eb9a-4280-900a-c00a891bcb73/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/8ccbfae5-eb9a-4280-900a-c00a891bcb73/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2219.723182] env[67015]: DEBUG 
oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-07209c58-d75f-4ab9-a430-6b60faea41d2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.731189] env[67015]: DEBUG oslo_vmware.api [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Waiting for the task: (returnval){ [ 2219.731189] env[67015]: value = "task-3114596" [ 2219.731189] env[67015]: _type = "Task" [ 2219.731189] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2219.740108] env[67015]: DEBUG oslo_vmware.api [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Task: {'id': task-3114596, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2220.241439] env[67015]: DEBUG oslo_vmware.exceptions [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Fault InvalidArgument not matched. {{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2220.241776] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2220.242345] env[67015]: ERROR nova.compute.manager [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2220.242345] env[67015]: Faults: ['InvalidArgument'] [ 2220.242345] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Traceback (most recent call last): [ 2220.242345] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2220.242345] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] yield resources [ 2220.242345] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2220.242345] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] self.driver.spawn(context, instance, image_meta, [ 2220.242345] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2220.242345] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2220.242345] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2220.242345] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] self._fetch_image_if_missing(context, vi) [ 2220.242345] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2220.242676] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] image_cache(vi, tmp_image_ds_loc) [ 2220.242676] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2220.242676] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] vm_util.copy_virtual_disk( [ 2220.242676] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2220.242676] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] session._wait_for_task(vmdk_copy_task) [ 2220.242676] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2220.242676] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] return self.wait_for_task(task_ref) [ 2220.242676] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2220.242676] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] return evt.wait() [ 2220.242676] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2220.242676] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] result = hub.switch() [ 2220.242676] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2220.242676] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] return self.greenlet.switch() [ 2220.242985] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2220.242985] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] self.f(*self.args, **self.kw) [ 2220.242985] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2220.242985] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] raise exceptions.translate_fault(task_info.error) [ 2220.242985] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2220.242985] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Faults: ['InvalidArgument'] [ 2220.242985] 
env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] [ 2220.242985] env[67015]: INFO nova.compute.manager [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Terminating instance [ 2220.244295] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2220.244506] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2220.244750] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a7694943-cc07-4ef6-a930-555d786d8325 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.246875] env[67015]: DEBUG nova.compute.manager [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2220.247097] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2220.247772] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c31bc9a0-5a19-4204-9042-5fb6483ea02b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.254573] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2220.254774] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-462f8fd6-0b56-4c46-be2c-3617b350ba14 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.256836] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2220.257029] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 
tempest-ServerRescueTestJSON-1505290448-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2220.257930] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fab8049-8930-4c5c-81a0-71250ad5e964 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.262401] env[67015]: DEBUG oslo_vmware.api [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Waiting for the task: (returnval){ [ 2220.262401] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]523906c2-4b74-0c64-5b9d-3302331b5e8c" [ 2220.262401] env[67015]: _type = "Task" [ 2220.262401] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2220.269996] env[67015]: DEBUG oslo_vmware.api [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]523906c2-4b74-0c64-5b9d-3302331b5e8c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2220.326386] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2220.326616] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2220.326790] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Deleting the datastore file [datastore2] 6daf9c76-9471-43ec-9dd6-aaa43efc391b {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2220.327063] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b4032d13-2501-451e-a816-52ca54126f03 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.333109] env[67015]: DEBUG oslo_vmware.api [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Waiting for the task: (returnval){ [ 2220.333109] env[67015]: value = "task-3114598" [ 2220.333109] env[67015]: _type = "Task" [ 2220.333109] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2220.340771] env[67015]: DEBUG oslo_vmware.api [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Task: {'id': task-3114598, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2220.772946] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2220.773332] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Creating directory with path [datastore2] vmware_temp/a3852014-c06f-4128-b852-82f312e1f7f1/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2220.773449] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1640466d-17c8-410a-8db2-2a2cc9ae9212 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.785118] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Created directory with path [datastore2] vmware_temp/a3852014-c06f-4128-b852-82f312e1f7f1/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2220.785310] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Fetch image to [datastore2] vmware_temp/a3852014-c06f-4128-b852-82f312e1f7f1/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2220.785478] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/a3852014-c06f-4128-b852-82f312e1f7f1/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2220.786224] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc2c8a4-9c59-4b21-b5e8-15da12f86c30 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.792508] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b551c4-0a07-47f3-bc28-db0d911cea5a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.803264] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed200189-1e81-402e-8d77-948cc1ee30fd {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.852339] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8707ab40-9dab-41bf-a261-ec003e975bec {{(pid=67015) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.861251] env[67015]: DEBUG oslo_vmware.api [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Task: {'id': task-3114598, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069616} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2220.862646] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2220.862833] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2220.863010] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2220.863191] env[67015]: INFO nova.compute.manager [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 2220.864963] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cc3fb4a4-01b3-4c5a-a6b7-b7752f2d5435 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.866863] env[67015]: DEBUG nova.compute.claims [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2220.867050] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2220.867267] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2220.889035] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2220.939612] env[67015]: DEBUG oslo_vmware.rw_handles [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a3852014-c06f-4128-b852-82f312e1f7f1/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2220.999989] env[67015]: DEBUG oslo_vmware.rw_handles [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2221.000237] env[67015]: DEBUG oslo_vmware.rw_handles [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a3852014-c06f-4128-b852-82f312e1f7f1/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2221.059963] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69ffa40-3125-4985-9049-3a5947b824f2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.067618] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd4a27e3-011a-4152-8ca5-c6544521fa17 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.096327] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aca7902-01f8-45a5-92dd-99e1a59291b0 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.102880] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86f1580f-a797-48de-b16f-cf50a95430ea {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.115084] env[67015]: DEBUG nova.compute.provider_tree [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2221.123758] env[67015]: DEBUG nova.scheduler.client.report [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2221.137722] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.270s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2221.138272] env[67015]: ERROR nova.compute.manager [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2221.138272] env[67015]: Faults: ['InvalidArgument'] [ 2221.138272] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Traceback (most recent call last): [ 2221.138272] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2221.138272] env[67015]: ERROR 
nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] self.driver.spawn(context, instance, image_meta, [ 2221.138272] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2221.138272] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2221.138272] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2221.138272] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] self._fetch_image_if_missing(context, vi) [ 2221.138272] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2221.138272] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] image_cache(vi, tmp_image_ds_loc) [ 2221.138272] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2221.138564] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] vm_util.copy_virtual_disk( [ 2221.138564] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2221.138564] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] session._wait_for_task(vmdk_copy_task) [ 2221.138564] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2221.138564] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] return self.wait_for_task(task_ref) [ 2221.138564] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2221.138564] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] return evt.wait() [ 2221.138564] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2221.138564] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] result = hub.switch() [ 2221.138564] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2221.138564] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] return self.greenlet.switch() [ 2221.138564] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2221.138564] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] self.f(*self.args, **self.kw) [ 2221.138837] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2221.138837] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] raise exceptions.translate_fault(task_info.error) [ 2221.138837] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2221.138837] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Faults: ['InvalidArgument'] [ 2221.138837] env[67015]: ERROR nova.compute.manager [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] [ 2221.139137] env[67015]: DEBUG nova.compute.utils [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2221.140429] env[67015]: DEBUG nova.compute.manager [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Build of instance 6daf9c76-9471-43ec-9dd6-aaa43efc391b was re-scheduled: A specified parameter was not correct: fileType [ 2221.140429] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2221.140807] env[67015]: DEBUG nova.compute.manager [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2221.140979] env[67015]: DEBUG nova.compute.manager [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2221.141167] env[67015]: DEBUG nova.compute.manager [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2221.141523] env[67015]: DEBUG nova.network.neutron [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2221.449241] env[67015]: DEBUG nova.network.neutron [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2221.461348] env[67015]: INFO nova.compute.manager [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Took 0.32 seconds to deallocate network for instance. [ 2221.514707] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2221.551705] env[67015]: INFO nova.scheduler.client.report [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Deleted allocations for instance 6daf9c76-9471-43ec-9dd6-aaa43efc391b [ 2221.570662] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d629c20c-08de-483a-87c3-ef5acd4df538 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Lock "6daf9c76-9471-43ec-9dd6-aaa43efc391b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 601.685s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2221.570662] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "6daf9c76-9471-43ec-9dd6-aaa43efc391b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 446.756s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2221.570662] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 2221.570885] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "6daf9c76-9471-43ec-9dd6-aaa43efc391b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2221.571391] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b4cfd69f-b7dc-4e5d-a09d-86bac8dac4c0 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Lock "6daf9c76-9471-43ec-9dd6-aaa43efc391b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 405.303s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2221.571391] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b4cfd69f-b7dc-4e5d-a09d-86bac8dac4c0 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Acquiring lock "6daf9c76-9471-43ec-9dd6-aaa43efc391b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2221.571568] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b4cfd69f-b7dc-4e5d-a09d-86bac8dac4c0 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Lock "6daf9c76-9471-43ec-9dd6-aaa43efc391b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2221.572708] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b4cfd69f-b7dc-4e5d-a09d-86bac8dac4c0 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Lock "6daf9c76-9471-43ec-9dd6-aaa43efc391b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2221.578647] env[67015]: INFO nova.compute.manager [None req-b4cfd69f-b7dc-4e5d-a09d-86bac8dac4c0 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Terminating instance [ 2221.579281] env[67015]: DEBUG nova.compute.manager [None req-b4cfd69f-b7dc-4e5d-a09d-86bac8dac4c0 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Start destroying the instance on the hypervisor. 
{{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2221.579462] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-b4cfd69f-b7dc-4e5d-a09d-86bac8dac4c0 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2221.579650] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-686d130b-6253-4273-959c-9d93d9b3e0c7 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.588452] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c70fa308-86a7-4a23-956a-2fcac2074079 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.615732] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-b4cfd69f-b7dc-4e5d-a09d-86bac8dac4c0 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6daf9c76-9471-43ec-9dd6-aaa43efc391b could not be found. [ 2221.615945] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-b4cfd69f-b7dc-4e5d-a09d-86bac8dac4c0 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2221.616171] env[67015]: INFO nova.compute.manager [None req-b4cfd69f-b7dc-4e5d-a09d-86bac8dac4c0 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2221.616422] env[67015]: DEBUG oslo.service.loopingcall [None req-b4cfd69f-b7dc-4e5d-a09d-86bac8dac4c0 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2221.616655] env[67015]: DEBUG nova.compute.manager [-] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2221.616752] env[67015]: DEBUG nova.network.neutron [-] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2221.639462] env[67015]: DEBUG nova.network.neutron [-] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2221.647863] env[67015]: INFO nova.compute.manager [-] [instance: 6daf9c76-9471-43ec-9dd6-aaa43efc391b] Took 0.03 seconds to deallocate network for instance. 
[ 2221.735938] env[67015]: DEBUG oslo_concurrency.lockutils [None req-b4cfd69f-b7dc-4e5d-a09d-86bac8dac4c0 tempest-DeleteServersTestJSON-462503479 tempest-DeleteServersTestJSON-462503479-project-member] Lock "6daf9c76-9471-43ec-9dd6-aaa43efc391b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.165s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2222.514509] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2226.515064] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2226.515064] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2226.526529] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2226.526743] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2226.526911] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2226.527109] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2226.528191] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54dd10ab-a209-422a-b6f6-10811249c754 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.536997] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-026addd9-b73e-4f04-8820-d761451d6914 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.551166] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8bd6fd2-78cd-40c1-af61-820beee72851 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2226.557424] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682d689e-e0e4-4308-89ad-480824625639 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.587548] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181057MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2226.587643] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2226.587830] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2226.646491] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance c1049b71-7c9b-4772-a889-fee93a62cf05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2226.646644] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 232b89d8-08a1-45af-91e6-1dc979880009 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2226.646774] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2226.646899] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance dea0f558-4d4b-41f4-9df9-c997835a628c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2226.647034] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 94751383-c885-4039-88b3-c1f6d3460e23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2226.647208] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2226.647319] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6d85e398-6c75-4311-8e23-32d811e211f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2226.647517] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2226.647665] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2226.737123] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceddb061-9eb0-42d6-adc0-a60df5911abb {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.744648] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cb4c5b4-97d3-4bb6-8a9b-fc0dfd6f65cd {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.774047] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-657febcc-0767-41b2-947f-62a585864f96 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.780958] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec53288-e953-4920-a7f9-052d15d27890 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.794717] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2226.802555] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2226.818305] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2226.818516] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.231s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2227.813186] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2229.513653] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2231.509352] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2231.528668] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2231.528668] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 2233.514568] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2233.514970] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 2233.514970] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2233.532052] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2233.532216] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Skipping network cache update for instance because it is Building. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2233.532346] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2233.532478] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2233.532605] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2233.532730] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2233.532854] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2233.532976] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 2266.384105] env[67015]: WARNING oslo_vmware.rw_handles [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2266.384105] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2266.384105] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2266.384105] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2266.384105] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2266.384105] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 2266.384105] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2266.384105] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2266.384105] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2266.384105] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2266.384105] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2266.384105] env[67015]: ERROR oslo_vmware.rw_handles [ 2266.384726] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/a3852014-c06f-4128-b852-82f312e1f7f1/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2266.386720] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2266.387024] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Copying Virtual Disk [datastore2] vmware_temp/a3852014-c06f-4128-b852-82f312e1f7f1/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/a3852014-c06f-4128-b852-82f312e1f7f1/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2266.387316] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cb731958-a390-4d44-8ab9-6b7ac57bac22 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.395423] env[67015]: DEBUG oslo_vmware.api [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Waiting for the task: (returnval){ [ 
2266.395423] env[67015]: value = "task-3114599" [ 2266.395423] env[67015]: _type = "Task" [ 2266.395423] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2266.402665] env[67015]: DEBUG oslo_vmware.api [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Task: {'id': task-3114599, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2266.905676] env[67015]: DEBUG oslo_vmware.exceptions [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Fault InvalidArgument not matched. {{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2266.905911] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2266.906514] env[67015]: ERROR nova.compute.manager [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2266.906514] env[67015]: Faults: ['InvalidArgument'] [ 2266.906514] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Traceback (most recent call last): [ 2266.906514] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2266.906514] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] yield resources [ 2266.906514] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2266.906514] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] self.driver.spawn(context, instance, image_meta, [ 2266.906514] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2266.906514] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2266.906514] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2266.906514] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] self._fetch_image_if_missing(context, vi) [ 2266.906514] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2266.906886] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] 
image_cache(vi, tmp_image_ds_loc) [ 2266.906886] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2266.906886] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] vm_util.copy_virtual_disk( [ 2266.906886] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2266.906886] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] session._wait_for_task(vmdk_copy_task) [ 2266.906886] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2266.906886] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] return self.wait_for_task(task_ref) [ 2266.906886] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2266.906886] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] return evt.wait() [ 2266.906886] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2266.906886] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] result = hub.switch() [ 2266.906886] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2266.906886] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] return self.greenlet.switch() [ 2266.907254] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2266.907254] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] self.f(*self.args, **self.kw) [ 2266.907254] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2266.907254] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] raise exceptions.translate_fault(task_info.error) [ 2266.907254] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2266.907254] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Faults: ['InvalidArgument'] [ 2266.907254] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] [ 2266.907254] env[67015]: INFO nova.compute.manager [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Terminating instance [ 2266.908481] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 
tempest-ServerMetadataTestJSON-59594459-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2266.908693] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2266.908932] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-26ef5da0-a3f9-4b85-905c-b4db9d60c0d6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.912449] env[67015]: DEBUG nova.compute.manager [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2266.912648] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2266.913374] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea68f00f-67f7-498f-b92d-357c6c7a6876 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.916909] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2266.917101] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2266.918099] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b309f6a-7259-4b47-a4df-836b1105839c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.921900] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2266.922401] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-627ba6e0-08ab-4ba5-8d1e-fb4654391fe4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.924568] env[67015]: DEBUG oslo_vmware.api [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Waiting for the task: (returnval){ [ 2266.924568] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52a1b9b5-d9dd-e69f-bc7d-7c244df09ba5" [ 2266.924568] env[67015]: _type = "Task" [ 2266.924568] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2266.932608] env[67015]: DEBUG oslo_vmware.api [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52a1b9b5-d9dd-e69f-bc7d-7c244df09ba5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2266.998371] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2266.998595] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2266.998803] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Deleting the datastore file [datastore2] c1049b71-7c9b-4772-a889-fee93a62cf05 {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2266.999206] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-10aaacd2-ef0c-48bf-8129-412e95e850a2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.004723] env[67015]: DEBUG oslo_vmware.api [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Waiting for the task: (returnval){ [ 2267.004723] env[67015]: value = "task-3114601" [ 2267.004723] env[67015]: _type = "Task" [ 2267.004723] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2267.012130] env[67015]: DEBUG oslo_vmware.api [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Task: {'id': task-3114601, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2267.434912] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2267.435210] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Creating directory with path [datastore2] vmware_temp/1d821e02-75e5-4330-b78e-89dbc0e0b208/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2267.435390] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a262297d-2da4-4104-8c96-bf09eb1f0676 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.446492] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Created directory with path [datastore2] vmware_temp/1d821e02-75e5-4330-b78e-89dbc0e0b208/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2267.446684] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Fetch image to [datastore2] vmware_temp/1d821e02-75e5-4330-b78e-89dbc0e0b208/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2267.446897] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/1d821e02-75e5-4330-b78e-89dbc0e0b208/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2267.447602] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea6dab29-410f-4c41-88b0-5cda7c3c16a8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.453892] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8847b0e-5d0c-4512-8097-6241e2cb99ea {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.463661] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5aec0e4-7256-44a2-8df4-f72137e771ed {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.493567] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1186dbff-764f-4624-be01-ab5352777cd9 {{(pid=67015) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.498914] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8e16042b-6c27-4f98-82a6-25605d9367fd {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.513426] env[67015]: DEBUG oslo_vmware.api [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Task: {'id': task-3114601, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.060574} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2267.513672] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2267.513857] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2267.514042] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2267.514243] env[67015]: INFO nova.compute.manager [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Took 0.60 seconds to destroy the instance on the hypervisor. 
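The CopyVirtualDisk_Task failure above follows oslo.vmware's task-polling pattern visible in the traceback: _poll_task keeps polling the vCenter TaskInfo while it is transient, and on an 'error' state raises exceptions.translate_fault(task_info.error), which surfaces here as VimFaultException with Faults: ['InvalidArgument']. The following is a minimal, self-contained sketch of that loop, not the Nova/oslo.vmware source; the TaskInfo shape, helper names, and canned poll sequence are illustrative assumptions, and only the fault name and message are copied from the log.

import time
from dataclasses import dataclass, field

@dataclass
class TaskInfo:
    state: str                        # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0
    faults: list = field(default_factory=list)
    message: str = ''

class VimFaultException(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException (assumed shape)."""
    def __init__(self, faults, message):
        super().__init__(message)
        self.fault_list = faults

def wait_for_task(poll, interval=0.5):
    """Poll `poll()` until the task leaves its transient states.

    Mirrors the loop the traceback points at in oslo_vmware/api.py: transient
    states keep polling, 'success' returns the task info, and 'error' is
    translated into a VimFaultException carrying the fault names.
    """
    while True:
        info = poll()
        if info.state in ('queued', 'running'):
            time.sleep(interval)      # the real loop yields via a looping call
            continue
        if info.state == 'success':
            return info
        raise VimFaultException(info.faults, info.message)

# Canned progression matching task-3114599 above: one 0% poll, then the fault.
polls = iter([
    TaskInfo('running', progress=0),
    TaskInfo('error', faults=['InvalidArgument'],
             message='A specified parameter was not correct: fileType'),
])
try:
    wait_for_task(lambda: next(polls), interval=0.0)
except VimFaultException as exc:
    print(exc, exc.fault_list)

Because the exception propagates out of driver.spawn(), _build_and_run_instance treats it as a build failure: the log below shows the claim being aborted, the instance re-scheduled, and its network deallocated.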
[ 2267.516490] env[67015]: DEBUG nova.compute.claims [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2267.518116] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2267.518116] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2267.520908] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2267.573528] env[67015]: DEBUG oslo_vmware.rw_handles [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1d821e02-75e5-4330-b78e-89dbc0e0b208/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2267.633871] env[67015]: DEBUG oslo_vmware.rw_handles [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2267.634086] env[67015]: DEBUG oslo_vmware.rw_handles [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1d821e02-75e5-4330-b78e-89dbc0e0b208/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2267.692892] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3691b98-fb08-4499-9ee6-c3fe29f745a8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.700209] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79c02cf3-93b1-4475-aacc-87e8c5b322ad {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.730927] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a6e042-3f85-4d17-a898-446649f0f2a2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.738297] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-506f7a8f-f579-4c14-9b33-65a456837513 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.751146] env[67015]: DEBUG nova.compute.provider_tree [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2267.760121] env[67015]: DEBUG nova.scheduler.client.report [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2267.772943] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.256s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2267.773512] env[67015]: ERROR nova.compute.manager [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2267.773512] env[67015]: Faults: ['InvalidArgument'] [ 2267.773512] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Traceback (most recent call last): [ 2267.773512] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2267.773512] env[67015]: ERROR 
nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] self.driver.spawn(context, instance, image_meta, [ 2267.773512] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2267.773512] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2267.773512] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2267.773512] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] self._fetch_image_if_missing(context, vi) [ 2267.773512] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2267.773512] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] image_cache(vi, tmp_image_ds_loc) [ 2267.773512] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2267.773869] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] vm_util.copy_virtual_disk( [ 2267.773869] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2267.773869] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] session._wait_for_task(vmdk_copy_task) [ 2267.773869] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2267.773869] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] return self.wait_for_task(task_ref) [ 2267.773869] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2267.773869] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] return evt.wait() [ 2267.773869] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2267.773869] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] result = hub.switch() [ 2267.773869] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2267.773869] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] return self.greenlet.switch() [ 2267.773869] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2267.773869] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] self.f(*self.args, **self.kw) [ 2267.774237] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2267.774237] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] raise exceptions.translate_fault(task_info.error) [ 2267.774237] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2267.774237] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Faults: ['InvalidArgument'] [ 2267.774237] env[67015]: ERROR nova.compute.manager [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] [ 2267.774237] env[67015]: DEBUG nova.compute.utils [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2267.775660] env[67015]: DEBUG nova.compute.manager [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Build of instance c1049b71-7c9b-4772-a889-fee93a62cf05 was re-scheduled: A specified parameter was not correct: fileType [ 2267.775660] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2267.776041] env[67015]: DEBUG nova.compute.manager [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2267.776247] env[67015]: DEBUG nova.compute.manager [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2267.776447] env[67015]: DEBUG nova.compute.manager [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2267.776616] env[67015]: DEBUG nova.network.neutron [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2268.220560] env[67015]: DEBUG nova.network.neutron [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2268.232613] env[67015]: INFO nova.compute.manager [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Took 0.46 seconds to deallocate network for instance. [ 2268.322943] env[67015]: INFO nova.scheduler.client.report [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Deleted allocations for instance c1049b71-7c9b-4772-a889-fee93a62cf05 [ 2268.341801] env[67015]: DEBUG oslo_concurrency.lockutils [None req-dc96947c-62ea-4d76-b6c1-a4019d9548ff tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Lock "c1049b71-7c9b-4772-a889-fee93a62cf05" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 563.747s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2268.342071] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0839856c-6f46-40a4-9a14-cb19ed290274 tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Lock "c1049b71-7c9b-4772-a889-fee93a62cf05" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 367.573s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2268.342296] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0839856c-6f46-40a4-9a14-cb19ed290274 tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Acquiring lock "c1049b71-7c9b-4772-a889-fee93a62cf05-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2268.342537] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0839856c-6f46-40a4-9a14-cb19ed290274 tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Lock "c1049b71-7c9b-4772-a889-fee93a62cf05-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67015) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2268.342710] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0839856c-6f46-40a4-9a14-cb19ed290274 tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Lock "c1049b71-7c9b-4772-a889-fee93a62cf05-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2268.344562] env[67015]: INFO nova.compute.manager [None req-0839856c-6f46-40a4-9a14-cb19ed290274 tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Terminating instance [ 2268.346432] env[67015]: DEBUG nova.compute.manager [None req-0839856c-6f46-40a4-9a14-cb19ed290274 tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2268.346627] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-0839856c-6f46-40a4-9a14-cb19ed290274 tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2268.347135] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-17ae7b4f-e537-429b-8965-7365e45cb0d9 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.356706] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7dd46bf-95f4-45b0-af3f-a5df942fb326 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.384503] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-0839856c-6f46-40a4-9a14-cb19ed290274 tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c1049b71-7c9b-4772-a889-fee93a62cf05 could not be found. [ 2268.384729] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-0839856c-6f46-40a4-9a14-cb19ed290274 tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2268.384911] env[67015]: INFO nova.compute.manager [None req-0839856c-6f46-40a4-9a14-cb19ed290274 tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2268.385179] env[67015]: DEBUG oslo.service.loopingcall [None req-0839856c-6f46-40a4-9a14-cb19ed290274 tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2268.385393] env[67015]: DEBUG nova.compute.manager [-] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2268.385488] env[67015]: DEBUG nova.network.neutron [-] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2268.407319] env[67015]: DEBUG nova.network.neutron [-] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2268.414793] env[67015]: INFO nova.compute.manager [-] [instance: c1049b71-7c9b-4772-a889-fee93a62cf05] Took 0.03 seconds to deallocate network for instance. [ 2268.500898] env[67015]: DEBUG oslo_concurrency.lockutils [None req-0839856c-6f46-40a4-9a14-cb19ed290274 tempest-ServerRescueTestJSON-1505290448 tempest-ServerRescueTestJSON-1505290448-project-member] Lock "c1049b71-7c9b-4772-a889-fee93a62cf05" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.159s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2277.514335] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2282.514470] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2282.514840] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2287.515511] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2287.528216] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2287.528455] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2287.528749] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s 
{{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2287.528913] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2287.530013] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8c62f6-4c03-4559-9b1d-576a5da36dc5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.538738] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e979b9-7437-4709-9f62-a7689d146ced {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.552039] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92845620-9c19-4e71-a58d-e4586f56c6d6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.557814] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf973934-b4bb-4c26-b1e9-ec901882c7bc {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.585718] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181052MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2287.585851] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2287.586050] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2287.647035] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 232b89d8-08a1-45af-91e6-1dc979880009 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2287.647035] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2287.647035] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance dea0f558-4d4b-41f4-9df9-c997835a628c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2287.647302] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 94751383-c885-4039-88b3-c1f6d3460e23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2287.647349] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2287.647468] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6d85e398-6c75-4311-8e23-32d811e211f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2287.647645] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2287.647783] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2287.722075] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c530a3-11b4-4249-afd4-9db287c56689 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.729279] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe4e2d2-94be-40fa-9772-578f21255ad9 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.758749] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e58bd3-256a-4e96-90dd-f8c72690f8cb {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.765745] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06db5cd-a4a6-4125-9b6a-4f297e90235b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.778237] env[67015]: DEBUG nova.compute.provider_tree 
[None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2287.786453] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2287.799361] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2287.799537] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.213s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2288.798466] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2289.510461] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2291.514141] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2293.514110] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2293.514462] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 2293.514462] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2293.530515] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Skipping network cache update for instance because it is Building. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2293.530713] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2293.530799] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2293.530970] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2293.531133] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2293.531264] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2293.531388] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 2293.531855] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2293.531996] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 2313.771061] env[67015]: WARNING oslo_vmware.rw_handles [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2313.771061] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2313.771061] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2313.771061] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2313.771061] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2313.771061] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 2313.771061] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2313.771061] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2313.771061] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2313.771061] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2313.771061] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2313.771061] env[67015]: ERROR oslo_vmware.rw_handles [ 2313.771600] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/1d821e02-75e5-4330-b78e-89dbc0e0b208/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2313.773770] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2313.774047] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Copying Virtual Disk [datastore2] vmware_temp/1d821e02-75e5-4330-b78e-89dbc0e0b208/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/1d821e02-75e5-4330-b78e-89dbc0e0b208/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2313.774337] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7771f6b-c9b1-4158-a91c-cfa764034a88 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.782609] env[67015]: DEBUG oslo_vmware.api [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Waiting for the task: (returnval){ [ 
2313.782609] env[67015]: value = "task-3114602" [ 2313.782609] env[67015]: _type = "Task" [ 2313.782609] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2313.790255] env[67015]: DEBUG oslo_vmware.api [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Task: {'id': task-3114602, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2314.293350] env[67015]: DEBUG oslo_vmware.exceptions [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Fault InvalidArgument not matched. {{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2314.293668] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2314.294272] env[67015]: ERROR nova.compute.manager [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2314.294272] env[67015]: Faults: ['InvalidArgument'] [ 2314.294272] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Traceback (most recent call last): [ 2314.294272] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2314.294272] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] yield resources [ 2314.294272] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2314.294272] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] self.driver.spawn(context, instance, image_meta, [ 2314.294272] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2314.294272] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2314.294272] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2314.294272] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] self._fetch_image_if_missing(context, vi) [ 2314.294272] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2314.294586] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] 
image_cache(vi, tmp_image_ds_loc) [ 2314.294586] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2314.294586] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] vm_util.copy_virtual_disk( [ 2314.294586] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2314.294586] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] session._wait_for_task(vmdk_copy_task) [ 2314.294586] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2314.294586] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] return self.wait_for_task(task_ref) [ 2314.294586] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2314.294586] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] return evt.wait() [ 2314.294586] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2314.294586] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] result = hub.switch() [ 2314.294586] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2314.294586] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] return self.greenlet.switch() [ 2314.294891] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2314.294891] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] self.f(*self.args, **self.kw) [ 2314.294891] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2314.294891] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] raise exceptions.translate_fault(task_info.error) [ 2314.294891] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2314.294891] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Faults: ['InvalidArgument'] [ 2314.294891] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] [ 2314.294891] env[67015]: INFO nova.compute.manager [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Terminating instance [ 2314.296206] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 
tempest-InstanceActionsTestJSON-1681403804-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2314.296425] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2314.296663] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-baa2e58a-ddc8-4aaf-aa52-554b651af21f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.299050] env[67015]: DEBUG nova.compute.manager [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2314.299273] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2314.299989] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a1e5e8e-e50f-443c-8dd0-6c459bb97c92 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.306512] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2314.306712] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-176c3135-9f8a-4266-a99c-d685e4d7e501 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.308778] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2314.308993] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2314.309905] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f0b2bf0-d96b-421e-8fc4-44776a9f1ec5 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.315606] env[67015]: DEBUG oslo_vmware.api [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Waiting for the task: (returnval){ [ 2314.315606] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52934703-0c6b-3615-4dd0-967e5f7ae032" [ 2314.315606] env[67015]: _type = "Task" [ 2314.315606] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2314.322364] env[67015]: DEBUG oslo_vmware.api [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52934703-0c6b-3615-4dd0-967e5f7ae032, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2314.374151] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2314.374391] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2314.374546] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Deleting the datastore file [datastore2] 232b89d8-08a1-45af-91e6-1dc979880009 {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2314.374851] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8ff63765-d8dc-40cf-861a-4c0a7f89fa3e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.380698] env[67015]: DEBUG oslo_vmware.api [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Waiting for the task: (returnval){ [ 2314.380698] env[67015]: value = "task-3114604" [ 2314.380698] env[67015]: _type = "Task" [ 2314.380698] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2314.388217] env[67015]: DEBUG oslo_vmware.api [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Task: {'id': task-3114604, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2314.825905] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2314.827798] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Creating directory with path [datastore2] vmware_temp/f8776cf3-6e9f-4de8-9d35-7b2f3341770f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2314.827798] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c8d6937d-1f26-49ae-85d4-9d4468d14bd2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.837865] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Created directory with path [datastore2] vmware_temp/f8776cf3-6e9f-4de8-9d35-7b2f3341770f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2314.838058] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Fetch image to [datastore2] vmware_temp/f8776cf3-6e9f-4de8-9d35-7b2f3341770f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2314.838236] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/f8776cf3-6e9f-4de8-9d35-7b2f3341770f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2314.838941] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03bbfe4-3d72-43f3-bac4-780fad6382e1 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.845186] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3cca7f7-db6b-44d0-b237-8c5688dee96b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.853639] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb38f956-4827-4d76-b7fc-6af8cfddbcd4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.885708] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-667aace3-35d8-4b7f-a99f-feb86a2f6bdf {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.892103] env[67015]: DEBUG oslo_vmware.api [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Task: {'id': task-3114604, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082492} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2314.893443] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2314.893629] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2314.893800] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2314.893973] env[67015]: INFO nova.compute.manager [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Took 0.59 seconds to destroy the instance on the hypervisor. 
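Note: the spawn failure above is the standard oslo.vmware task flow. VirtualDiskManager.CopyVirtualDisk_Task is invoked, wait_for_task() polls the task info in a loop (the "progress is 0%." line), and once vCenter reports the task in the error state the recorded fault is translated into an exception. "Fault InvalidArgument not matched" means no specific exception class was registered for that fault name, so a generic VimFaultException("A specified parameter was not correct: fileType") is raised out of _poll_task, propagates through session._wait_for_task() into the compute manager, and the half-built VM is destroyed. The sketch below illustrates that poll-and-translate loop under stated assumptions; get_task_info() and TaskFaultError are hypothetical stand-ins for illustration, not the oslo.vmware API.

    # Minimal sketch of the poll-and-translate pattern visible in the log.
    # Assumptions: get_task_info() is a hypothetical callable returning an
    # object with .state ('queued'/'running'/'success'/'error'), and, on
    # error, .error.localizedMessage plus a list of fault names, mirroring
    # the TaskInfo data vCenter reports. TaskFaultError is likewise a
    # hypothetical stand-in for VimFaultException.
    import time

    class TaskFaultError(Exception):
        """Raised when a vCenter task finishes in the 'error' state."""
        def __init__(self, message, fault_names):
            super().__init__(message)
            self.fault_names = fault_names  # e.g. ['InvalidArgument']

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a vCenter task until it leaves 'queued'/'running'."""
        while True:
            info = get_task_info()
            if info.state in ('queued', 'running'):
                # Corresponds to the repeated "progress is N%." debug lines.
                time.sleep(poll_interval)
                continue
            if info.state == 'success':
                return info
            # state == 'error': translate the recorded fault into an
            # exception, as _poll_task does before the traceback above.
            raise TaskFaultError(info.error.localizedMessage,
                                 list(info.error.fault_names))

The reschedule that follows later in the log is the compute manager's normal reaction to this exception: the claim is aborted, allocations are deleted, and the build is handed back to the scheduler.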
[ 2314.895748] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ec0fa831-7a83-4c04-a636-103dced3ffa3 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.897520] env[67015]: DEBUG nova.compute.claims [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2314.897734] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2314.897969] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2314.921066] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2314.978202] env[67015]: DEBUG oslo_vmware.rw_handles [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f8776cf3-6e9f-4de8-9d35-7b2f3341770f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2315.046224] env[67015]: DEBUG oslo_vmware.rw_handles [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2315.046457] env[67015]: DEBUG oslo_vmware.rw_handles [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f8776cf3-6e9f-4de8-9d35-7b2f3341770f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2315.077711] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3024fe45-5401-48fb-9c3c-b57694d22d94 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2315.086936] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b6e4b3-2a29-40d5-b1e2-274d2e4d39be {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2315.131953] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-410e6dec-0766-4e70-822f-6393f678fbed {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2315.138606] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88bb2ce-86c5-4eb2-8f58-0ff6f5d06a3d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2315.150774] env[67015]: DEBUG nova.compute.provider_tree [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2315.159019] env[67015]: DEBUG nova.scheduler.client.report [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2315.172362] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.274s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2315.172886] env[67015]: ERROR nova.compute.manager [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2315.172886] env[67015]: Faults: ['InvalidArgument'] [ 2315.172886] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Traceback (most recent call last): [ 2315.172886] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2315.172886] env[67015]: ERROR 
nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] self.driver.spawn(context, instance, image_meta, [ 2315.172886] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2315.172886] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2315.172886] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2315.172886] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] self._fetch_image_if_missing(context, vi) [ 2315.172886] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2315.172886] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] image_cache(vi, tmp_image_ds_loc) [ 2315.172886] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2315.173259] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] vm_util.copy_virtual_disk( [ 2315.173259] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2315.173259] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] session._wait_for_task(vmdk_copy_task) [ 2315.173259] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2315.173259] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] return self.wait_for_task(task_ref) [ 2315.173259] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2315.173259] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] return evt.wait() [ 2315.173259] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2315.173259] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] result = hub.switch() [ 2315.173259] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2315.173259] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] return self.greenlet.switch() [ 2315.173259] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2315.173259] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] self.f(*self.args, **self.kw) [ 2315.173609] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2315.173609] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] raise exceptions.translate_fault(task_info.error) [ 2315.173609] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2315.173609] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Faults: ['InvalidArgument'] [ 2315.173609] env[67015]: ERROR nova.compute.manager [instance: 232b89d8-08a1-45af-91e6-1dc979880009] [ 2315.173609] env[67015]: DEBUG nova.compute.utils [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2315.174968] env[67015]: DEBUG nova.compute.manager [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Build of instance 232b89d8-08a1-45af-91e6-1dc979880009 was re-scheduled: A specified parameter was not correct: fileType [ 2315.174968] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2315.175352] env[67015]: DEBUG nova.compute.manager [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2315.175528] env[67015]: DEBUG nova.compute.manager [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2315.175745] env[67015]: DEBUG nova.compute.manager [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2315.175869] env[67015]: DEBUG nova.network.neutron [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2315.466482] env[67015]: DEBUG nova.network.neutron [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2315.480719] env[67015]: INFO nova.compute.manager [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Took 0.30 seconds to deallocate network for instance. [ 2315.578367] env[67015]: INFO nova.scheduler.client.report [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Deleted allocations for instance 232b89d8-08a1-45af-91e6-1dc979880009 [ 2315.602196] env[67015]: DEBUG oslo_concurrency.lockutils [None req-22f01ee9-90a4-4fb8-9293-579a7adef9dc tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Lock "232b89d8-08a1-45af-91e6-1dc979880009" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 557.058s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2315.602486] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e06dde4c-78b1-4062-bbe0-12978f7351b2 tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Lock "232b89d8-08a1-45af-91e6-1dc979880009" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 361.185s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2315.603172] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e06dde4c-78b1-4062-bbe0-12978f7351b2 tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Acquiring lock "232b89d8-08a1-45af-91e6-1dc979880009-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2315.603172] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e06dde4c-78b1-4062-bbe0-12978f7351b2 tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Lock "232b89d8-08a1-45af-91e6-1dc979880009-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67015) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2315.603172] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e06dde4c-78b1-4062-bbe0-12978f7351b2 tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Lock "232b89d8-08a1-45af-91e6-1dc979880009-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2315.605207] env[67015]: INFO nova.compute.manager [None req-e06dde4c-78b1-4062-bbe0-12978f7351b2 tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Terminating instance [ 2315.607402] env[67015]: DEBUG nova.compute.manager [None req-e06dde4c-78b1-4062-bbe0-12978f7351b2 tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2315.607402] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e06dde4c-78b1-4062-bbe0-12978f7351b2 tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2315.607855] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9f17cd30-3170-4d53-b398-18f8f3aac220 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2315.617927] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be906c75-d33b-45f1-ac33-f0241fc66abd {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2315.645629] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-e06dde4c-78b1-4062-bbe0-12978f7351b2 tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 232b89d8-08a1-45af-91e6-1dc979880009 could not be found. [ 2315.645852] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e06dde4c-78b1-4062-bbe0-12978f7351b2 tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2315.646046] env[67015]: INFO nova.compute.manager [None req-e06dde4c-78b1-4062-bbe0-12978f7351b2 tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2315.646300] env[67015]: DEBUG oslo.service.loopingcall [None req-e06dde4c-78b1-4062-bbe0-12978f7351b2 tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2315.646521] env[67015]: DEBUG nova.compute.manager [-] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2315.646617] env[67015]: DEBUG nova.network.neutron [-] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2315.674626] env[67015]: DEBUG nova.network.neutron [-] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2315.683437] env[67015]: INFO nova.compute.manager [-] [instance: 232b89d8-08a1-45af-91e6-1dc979880009] Took 0.04 seconds to deallocate network for instance. [ 2315.785340] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e06dde4c-78b1-4062-bbe0-12978f7351b2 tempest-ServerMetadataTestJSON-59594459 tempest-ServerMetadataTestJSON-59594459-project-member] Lock "232b89d8-08a1-45af-91e6-1dc979880009" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.182s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2339.515623] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2343.513861] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2343.514187] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2347.514470] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2347.526432] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2347.526664] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2347.526834] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s 
{{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2347.526991] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2347.530892] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48490b72-0912-43ee-a535-4d6d557c71ec {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2347.542391] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e8bb7c-e167-4789-9e71-ba4737c9d64b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2347.556887] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c2c8754-fb5c-4f45-9330-69996af3bf9d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2347.564027] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bae9d99-377d-4280-a014-5ffed733efe4 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2347.593852] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181052MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2347.594246] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2347.594596] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2347.685925] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2347.686138] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance dea0f558-4d4b-41f4-9df9-c997835a628c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2347.686274] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 94751383-c885-4039-88b3-c1f6d3460e23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2347.686404] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2347.686538] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6d85e398-6c75-4311-8e23-32d811e211f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2347.686948] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2347.687148] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2347.704468] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Refreshing inventories for resource provider 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2347.718391] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Updating ProviderTree inventory for provider 82311841-8ff3-4f49-9053-67c5a45ef771 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2347.718605] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Updating inventory in ProviderTree for provider 82311841-8ff3-4f49-9053-67c5a45ef771 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2347.728679] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Refreshing aggregate associations for resource provider 82311841-8ff3-4f49-9053-67c5a45ef771, aggregates: None {{(pid=67015) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2347.743875] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Refreshing trait associations for resource provider 82311841-8ff3-4f49-9053-67c5a45ef771, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=67015) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2347.809331] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a0b28a-4a7e-45ea-9006-1e71b63c8403 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2347.817039] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d02878b-eff2-4c96-b416-743ce86b461f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2347.845673] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f906e76-64a4-4856-80d0-c837c5e0aa0f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2347.852739] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf0228b-51aa-40f1-82a7-86450f52da99 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2347.865203] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2347.876543] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2347.883042] env[67015]: DEBUG oslo_concurrency.lockutils [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Acquiring lock "98e42e37-e7d8-46f7-96c0-792f11f77c24" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2347.883233] env[67015]: DEBUG oslo_concurrency.lockutils [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Lock 
"98e42e37-e7d8-46f7-96c0-792f11f77c24" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2347.888424] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2347.888507] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.294s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2347.893286] env[67015]: DEBUG nova.compute.manager [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] [instance: 98e42e37-e7d8-46f7-96c0-792f11f77c24] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2347.988950] env[67015]: DEBUG oslo_concurrency.lockutils [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2347.989288] env[67015]: DEBUG oslo_concurrency.lockutils [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2347.990822] env[67015]: INFO nova.compute.claims [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] [instance: 98e42e37-e7d8-46f7-96c0-792f11f77c24] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2348.114026] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d56967-4c47-4896-902f-441008e18531 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.121030] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d3e82d-37a1-48d3-94ae-b31e07bfd130 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.149964] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38284795-6c22-43dc-b039-d4fcbccdbb1e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.156998] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd97b529-087d-4add-9e5d-56929eccd286 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.171405] env[67015]: DEBUG 
nova.compute.provider_tree [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2348.179860] env[67015]: DEBUG nova.scheduler.client.report [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2348.198605] env[67015]: DEBUG oslo_concurrency.lockutils [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.209s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2348.199121] env[67015]: DEBUG nova.compute.manager [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] [instance: 98e42e37-e7d8-46f7-96c0-792f11f77c24] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2348.230779] env[67015]: DEBUG nova.compute.utils [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2348.232603] env[67015]: DEBUG nova.compute.manager [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] [instance: 98e42e37-e7d8-46f7-96c0-792f11f77c24] Not allocating networking since 'none' was specified. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 2348.240649] env[67015]: DEBUG nova.compute.manager [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] [instance: 98e42e37-e7d8-46f7-96c0-792f11f77c24] Start building block device mappings for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2348.298561] env[67015]: DEBUG nova.compute.manager [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] [instance: 98e42e37-e7d8-46f7-96c0-792f11f77c24] Start spawning the instance on the hypervisor. 
{{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2348.322548] env[67015]: DEBUG nova.virt.hardware [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2348.322784] env[67015]: DEBUG nova.virt.hardware [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2348.322943] env[67015]: DEBUG nova.virt.hardware [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2348.323141] env[67015]: DEBUG nova.virt.hardware [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2348.323296] env[67015]: DEBUG nova.virt.hardware [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2348.323450] env[67015]: DEBUG nova.virt.hardware [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2348.323668] env[67015]: DEBUG nova.virt.hardware [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2348.323821] env[67015]: DEBUG nova.virt.hardware [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2348.323987] env[67015]: DEBUG nova.virt.hardware [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 
tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2348.324169] env[67015]: DEBUG nova.virt.hardware [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2348.324347] env[67015]: DEBUG nova.virt.hardware [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2348.325247] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61acfd59-6e87-46bf-9c84-8404b5dbaa8b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.333103] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b009e64-870c-4f16-870d-56f6e3ae8d4a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.346500] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] [instance: 98e42e37-e7d8-46f7-96c0-792f11f77c24] Instance VIF info [] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2348.351979] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Creating folder: Project (366350e7455c42a8bef47f391153ef1e). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2348.352261] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5d45de9f-7fc9-47f6-abd3-8c63941f98bd {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.361529] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Created folder: Project (366350e7455c42a8bef47f391153ef1e) in parent group-v623108. [ 2348.361711] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Creating folder: Instances. Parent ref: group-v623234. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2348.361911] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c080c283-86d6-44f1-836b-ab8db71aff8a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.370213] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Created folder: Instances in parent group-v623234. 
[ 2348.370450] env[67015]: DEBUG oslo.service.loopingcall [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2348.370622] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98e42e37-e7d8-46f7-96c0-792f11f77c24] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2348.370809] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ded43a84-f53b-4d03-91a1-0344b1abde9c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.386137] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2348.386137] env[67015]: value = "task-3114607" [ 2348.386137] env[67015]: _type = "Task" [ 2348.386137] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2348.392898] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114607, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2348.898317] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114607, 'name': CreateVM_Task} progress is 99%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2349.396169] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114607, 'name': CreateVM_Task} progress is 99%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2349.513946] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2349.514304] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2349.514549] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Cleaning up deleted instances {{(pid=67015) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 2349.524121] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] There are 0 instances to clean {{(pid=67015) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 2349.896822] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114607, 'name': CreateVM_Task, 'duration_secs': 1.275047} completed successfully. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2349.896960] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98e42e37-e7d8-46f7-96c0-792f11f77c24] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2349.897385] env[67015]: DEBUG oslo_concurrency.lockutils [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2349.897550] env[67015]: DEBUG oslo_concurrency.lockutils [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2349.897948] env[67015]: DEBUG oslo_concurrency.lockutils [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2349.898226] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7ba008b-a5c0-4062-b36d-4913bf80674f {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2349.902384] env[67015]: DEBUG oslo_vmware.api [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Waiting for the task: (returnval){ [ 2349.902384] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]52b48478-331f-0342-c511-1ecc3e217f17" [ 2349.902384] env[67015]: _type = "Task" [ 2349.902384] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2349.909410] env[67015]: DEBUG oslo_vmware.api [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]52b48478-331f-0342-c511-1ecc3e217f17, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2350.412628] env[67015]: DEBUG oslo_concurrency.lockutils [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2350.412900] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] [instance: 98e42e37-e7d8-46f7-96c0-792f11f77c24] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2350.413132] env[67015]: DEBUG oslo_concurrency.lockutils [None req-03e03f51-e8bc-4bf5-81a4-c2041cf59317 tempest-ServerShowV254Test-1045237187 tempest-ServerShowV254Test-1045237187-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2350.523932] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2351.514455] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2353.514768] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2353.514768] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 2353.514768] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2353.530937] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2353.531095] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2353.531219] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Skipping network cache update for instance because it is Building. 
{{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2353.531352] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2353.531479] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2353.531604] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 98e42e37-e7d8-46f7-96c0-792f11f77c24] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2353.531729] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 2354.514238] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2354.514445] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 2355.510616] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2359.515707] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2359.516041] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Cleaning up deleted instances with incomplete migration {{(pid=67015) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 2361.514556] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2364.770945] env[67015]: WARNING oslo_vmware.rw_handles [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2364.770945] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2364.770945] env[67015]: ERROR oslo_vmware.rw_handles File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2364.770945] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2364.770945] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2364.770945] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 2364.770945] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2364.770945] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2364.770945] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2364.770945] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2364.770945] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2364.770945] env[67015]: ERROR oslo_vmware.rw_handles [ 2364.770945] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/f8776cf3-6e9f-4de8-9d35-7b2f3341770f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2364.772074] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2364.772479] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Copying Virtual Disk [datastore2] vmware_temp/f8776cf3-6e9f-4de8-9d35-7b2f3341770f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/f8776cf3-6e9f-4de8-9d35-7b2f3341770f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2364.772934] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bc8341a4-9450-456b-98fd-5d8fcfb798fb {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2364.780577] env[67015]: DEBUG oslo_vmware.api [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Waiting for the task: (returnval){ [ 2364.780577] env[67015]: value = "task-3114608" [ 2364.780577] env[67015]: _type = "Task" [ 2364.780577] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2364.789072] env[67015]: DEBUG oslo_vmware.api [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Task: {'id': task-3114608, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2365.291231] env[67015]: DEBUG oslo_vmware.exceptions [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Fault InvalidArgument not matched. {{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2365.291526] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2365.292124] env[67015]: ERROR nova.compute.manager [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2365.292124] env[67015]: Faults: ['InvalidArgument'] [ 2365.292124] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Traceback (most recent call last): [ 2365.292124] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2365.292124] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] yield resources [ 2365.292124] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2365.292124] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] self.driver.spawn(context, instance, image_meta, [ 2365.292124] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2365.292124] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2365.292124] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2365.292124] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] self._fetch_image_if_missing(context, vi) [ 2365.292124] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2365.292535] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] image_cache(vi, tmp_image_ds_loc) [ 2365.292535] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2365.292535] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] vm_util.copy_virtual_disk( [ 2365.292535] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2365.292535] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] session._wait_for_task(vmdk_copy_task) [ 2365.292535] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2365.292535] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] return self.wait_for_task(task_ref) [ 2365.292535] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2365.292535] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] return evt.wait() [ 2365.292535] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2365.292535] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] result = hub.switch() [ 2365.292535] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2365.292535] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] return self.greenlet.switch() [ 2365.292877] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2365.292877] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] self.f(*self.args, **self.kw) [ 2365.292877] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2365.292877] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] raise exceptions.translate_fault(task_info.error) [ 2365.292877] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2365.292877] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Faults: ['InvalidArgument'] [ 2365.292877] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] [ 2365.292877] env[67015]: INFO nova.compute.manager [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Terminating instance [ 2365.294044] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2365.294254] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e 
tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2365.294491] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f1ec094-de29-407d-a3e5-99e6b1106a82 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.296948] env[67015]: DEBUG nova.compute.manager [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2365.297151] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2365.297858] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f343a5cc-4a69-4636-85fe-19739b8c85e9 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.305872] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2365.306090] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-518c501b-f910-4f37-87e7-680b01c4a163 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.308078] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2365.308257] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2365.309253] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f0c330f-62b5-46c6-9df9-ab5288977ed6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.313572] env[67015]: DEBUG oslo_vmware.api [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Waiting for the task: (returnval){ [ 2365.313572] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]529ed752-c9d9-a75f-faa8-c64d31d61ef5" [ 2365.313572] env[67015]: _type = "Task" [ 2365.313572] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2365.320635] env[67015]: DEBUG oslo_vmware.api [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]529ed752-c9d9-a75f-faa8-c64d31d61ef5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2365.380986] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2365.381235] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2365.381361] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Deleting the datastore file [datastore2] 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03 {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2365.381624] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d881eab7-c466-45c2-8612-8652ac65ca15 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.387781] env[67015]: DEBUG oslo_vmware.api [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Waiting for the task: (returnval){ [ 2365.387781] env[67015]: value = "task-3114610" [ 2365.387781] env[67015]: _type = "Task" [ 2365.387781] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2365.395106] env[67015]: DEBUG oslo_vmware.api [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Task: {'id': task-3114610, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2365.826018] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2365.826018] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Creating directory with path [datastore2] vmware_temp/9a91d9c0-16df-4df6-b978-37a485690c4f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2365.826018] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-63fc5310-aef8-4397-bbd3-43da8a801c8b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.835410] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Created directory with path [datastore2] vmware_temp/9a91d9c0-16df-4df6-b978-37a485690c4f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2365.835613] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Fetch image to [datastore2] vmware_temp/9a91d9c0-16df-4df6-b978-37a485690c4f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2365.835799] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/9a91d9c0-16df-4df6-b978-37a485690c4f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2365.836536] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc0efdc7-3643-4251-857a-49bb4fa9aad8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.843132] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d66dc19f-733f-4c35-8d3a-fceb04bbfcb2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.852028] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9839781-d20a-4d7f-a9c5-d29b0c3f7e04 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.882453] env[67015]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb9f7456-0df0-4992-840f-2a7f448acd10 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.887835] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-536a13ef-f4e9-46f9-b17f-551efbc1625e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.896781] env[67015]: DEBUG oslo_vmware.api [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Task: {'id': task-3114610, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063925} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2365.897030] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2365.897221] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2365.897396] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2365.897571] env[67015]: INFO nova.compute.manager [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2365.899651] env[67015]: DEBUG nova.compute.claims [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2365.899815] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2365.900037] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2365.918516] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2365.969125] env[67015]: DEBUG oslo_vmware.rw_handles [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9a91d9c0-16df-4df6-b978-37a485690c4f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2366.032534] env[67015]: DEBUG oslo_vmware.rw_handles [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2366.032730] env[67015]: DEBUG oslo_vmware.rw_handles [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9a91d9c0-16df-4df6-b978-37a485690c4f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2366.097144] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6398b7fe-104d-400b-bdd3-789dd5ace457 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2366.104056] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0032ac05-a989-4db6-9931-bad6e787962e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2366.132644] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91de874f-6bd4-46d4-8125-5609a2f9e691 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2366.139191] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ab7b01-70a0-4ae7-9050-8f3b825c3d17 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2366.153385] env[67015]: DEBUG nova.compute.provider_tree [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2366.161605] env[67015]: DEBUG nova.scheduler.client.report [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2366.175403] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.275s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2366.175921] env[67015]: ERROR nova.compute.manager [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2366.175921] env[67015]: Faults: ['InvalidArgument'] [ 2366.175921] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Traceback (most recent call last): [ 2366.175921] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2366.175921] 
env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] self.driver.spawn(context, instance, image_meta, [ 2366.175921] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2366.175921] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2366.175921] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2366.175921] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] self._fetch_image_if_missing(context, vi) [ 2366.175921] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2366.175921] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] image_cache(vi, tmp_image_ds_loc) [ 2366.175921] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2366.176296] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] vm_util.copy_virtual_disk( [ 2366.176296] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2366.176296] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] session._wait_for_task(vmdk_copy_task) [ 2366.176296] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2366.176296] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] return self.wait_for_task(task_ref) [ 2366.176296] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2366.176296] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] return evt.wait() [ 2366.176296] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2366.176296] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] result = hub.switch() [ 2366.176296] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2366.176296] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] return self.greenlet.switch() [ 2366.176296] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2366.176296] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] self.f(*self.args, **self.kw) [ 2366.176588] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2366.176588] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] raise exceptions.translate_fault(task_info.error) [ 2366.176588] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2366.176588] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Faults: ['InvalidArgument'] [ 2366.176588] env[67015]: ERROR nova.compute.manager [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] [ 2366.176705] env[67015]: DEBUG nova.compute.utils [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2366.178404] env[67015]: DEBUG nova.compute.manager [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Build of instance 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03 was re-scheduled: A specified parameter was not correct: fileType [ 2366.178404] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2366.178785] env[67015]: DEBUG nova.compute.manager [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2366.178959] env[67015]: DEBUG nova.compute.manager [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2366.179147] env[67015]: DEBUG nova.compute.manager [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2366.179309] env[67015]: DEBUG nova.network.neutron [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2366.493459] env[67015]: DEBUG nova.network.neutron [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2366.512350] env[67015]: INFO nova.compute.manager [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Took 0.33 seconds to deallocate network for instance. [ 2366.873685] env[67015]: INFO nova.scheduler.client.report [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Deleted allocations for instance 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03 [ 2366.967754] env[67015]: DEBUG oslo_concurrency.lockutils [None req-7fb432b7-938e-4717-bbe2-30c25e1d6a62 tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Lock "8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 584.997s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2366.968117] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ef5bebc4-767e-4387-938c-8582f4b544ef tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Lock "8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 388.102s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2366.968455] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ef5bebc4-767e-4387-938c-8582f4b544ef tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Acquiring lock "8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2366.968997] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ef5bebc4-767e-4387-938c-8582f4b544ef tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Lock "8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03-events" acquired by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2366.969237] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ef5bebc4-767e-4387-938c-8582f4b544ef tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Lock "8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2366.974245] env[67015]: INFO nova.compute.manager [None req-ef5bebc4-767e-4387-938c-8582f4b544ef tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Terminating instance [ 2366.976810] env[67015]: DEBUG nova.compute.manager [None req-ef5bebc4-767e-4387-938c-8582f4b544ef tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2366.978455] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5bebc4-767e-4387-938c-8582f4b544ef tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2366.978455] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0ecf6ccc-c4d1-418a-bfb6-907faf91dda1 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2366.987418] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c45f5e6f-7b18-404d-b597-8021491c0003 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.030712] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-ef5bebc4-767e-4387-938c-8582f4b544ef tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03 could not be found. [ 2367.030918] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5bebc4-767e-4387-938c-8582f4b544ef tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2367.031126] env[67015]: INFO nova.compute.manager [None req-ef5bebc4-767e-4387-938c-8582f4b544ef tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 2367.031380] env[67015]: DEBUG oslo.service.loopingcall [None req-ef5bebc4-767e-4387-938c-8582f4b544ef tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2367.033550] env[67015]: DEBUG nova.compute.manager [-] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2367.033668] env[67015]: DEBUG nova.network.neutron [-] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2367.098337] env[67015]: DEBUG nova.network.neutron [-] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2367.116813] env[67015]: INFO nova.compute.manager [-] [instance: 8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03] Took 0.08 seconds to deallocate network for instance. [ 2367.330837] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ef5bebc4-767e-4387-938c-8582f4b544ef tempest-InstanceActionsTestJSON-1681403804 tempest-InstanceActionsTestJSON-1681403804-project-member] Lock "8bac9ae9-a074-4f3d-ae82-a8aaa9b6db03" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.363s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2370.839181] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2370.839584] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Getting list of instances from cluster (obj){ [ 2370.839584] env[67015]: value = "domain-c8" [ 2370.839584] env[67015]: _type = "ClusterComputeResource" [ 2370.839584] env[67015]: } {{(pid=67015) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2370.840595] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be7894e5-eebd-439b-bd9b-1bc0e91ebcad {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2370.854587] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Got total of 5 instances {{(pid=67015) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2376.857600] env[67015]: DEBUG oslo_concurrency.lockutils [None req-d594d9af-2d62-4c18-a100-8b0dad36253b tempest-AttachVolumeShelveTestJSON-1186873896 tempest-AttachVolumeShelveTestJSON-1186873896-project-member] Acquiring lock "6d85e398-6c75-4311-8e23-32d811e211f6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2386.907810] env[67015]: DEBUG oslo_concurrency.lockutils [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 
tempest-AttachVolumeTestJSON-57900212-project-member] Acquiring lock "959b5f5a-18d2-4dff-8a43-bfba04947822" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2386.907810] env[67015]: DEBUG oslo_concurrency.lockutils [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Lock "959b5f5a-18d2-4dff-8a43-bfba04947822" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2386.917968] env[67015]: DEBUG nova.compute.manager [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] Starting instance... {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2386.972834] env[67015]: DEBUG oslo_concurrency.lockutils [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2386.973113] env[67015]: DEBUG oslo_concurrency.lockutils [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2386.974658] env[67015]: INFO nova.compute.claims [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2387.104512] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8015e254-9e8a-436d-aece-dc0b65fa8878 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.112860] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab2539f-a1c8-42cd-83e7-5dbb47ac5f6c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.145366] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-690aaaab-21a7-475f-bf8d-28ec13b0ed03 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.152855] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee5f22e-be3e-41da-9a2c-a9d3e8f24dcd {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.165957] env[67015]: DEBUG nova.compute.provider_tree [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 
tempest-AttachVolumeTestJSON-57900212-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2387.174453] env[67015]: DEBUG nova.scheduler.client.report [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2387.188684] env[67015]: DEBUG oslo_concurrency.lockutils [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.215s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2387.189256] env[67015]: DEBUG nova.compute.manager [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] Start building networks asynchronously for instance. {{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2387.221241] env[67015]: DEBUG nova.compute.utils [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Using /dev/sd instead of None {{(pid=67015) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2387.225826] env[67015]: DEBUG nova.compute.manager [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] Allocating IP information in the background. {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2387.225826] env[67015]: DEBUG nova.network.neutron [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] allocate_for_instance() {{(pid=67015) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2387.238832] env[67015]: DEBUG nova.compute.manager [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] Start building block device mappings for instance. 
{{(pid=67015) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2387.281526] env[67015]: DEBUG nova.policy [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3b248be0eeaa4dba9171108048623575', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e31f10783f184cd182e4ee65e3f2fc05', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67015) authorize /opt/stack/nova/nova/policy.py:203}} [ 2387.302027] env[67015]: DEBUG nova.compute.manager [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] Start spawning the instance on the hypervisor. {{(pid=67015) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2387.326492] env[67015]: DEBUG nova.virt.hardware [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-20T08:17:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-20T08:16:53Z,direct_url=,disk_format='vmdk',id=8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='482f98aba88c4103b6a8d7c7ab5d030d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-20T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2387.326732] env[67015]: DEBUG nova.virt.hardware [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Flavor limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2387.326890] env[67015]: DEBUG nova.virt.hardware [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Image limits 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2387.327088] env[67015]: DEBUG nova.virt.hardware [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Flavor pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2387.327238] env[67015]: DEBUG nova.virt.hardware [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Image pref 0:0:0 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2387.327388] env[67015]: DEBUG nova.virt.hardware [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 
tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67015) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2387.327595] env[67015]: DEBUG nova.virt.hardware [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2387.327753] env[67015]: DEBUG nova.virt.hardware [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2387.327922] env[67015]: DEBUG nova.virt.hardware [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Got 1 possible topologies {{(pid=67015) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2387.328102] env[67015]: DEBUG nova.virt.hardware [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2387.328281] env[67015]: DEBUG nova.virt.hardware [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67015) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2387.329165] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2101703-bdea-49bd-b792-0aec9b004a1c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.336762] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69dd9f80-a985-4d38-a6fe-54aa67798a6d {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.584618] env[67015]: DEBUG nova.network.neutron [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] Successfully created port: 7402deab-e771-4d45-8e3c-60b45ffeab97 {{(pid=67015) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2387.812579] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._sync_power_states {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2387.832343] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Getting list of instances from cluster (obj){ [ 2387.832343] env[67015]: value = "domain-c8" [ 2387.832343] env[67015]: _type = "ClusterComputeResource" [ 2387.832343] env[67015]: } {{(pid=67015) list_instances 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2387.833109] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c92a80bd-c216-4c3a-bdfd-4d068b33e75a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.846301] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Got total of 5 instances {{(pid=67015) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2387.846636] env[67015]: WARNING nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] While synchronizing instance power states, found 6 instances in the database and 5 instances on the hypervisor. [ 2387.846636] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid dea0f558-4d4b-41f4-9df9-c997835a628c {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 2387.846851] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 94751383-c885-4039-88b3-c1f6d3460e23 {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 2387.847106] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5 {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 2387.847314] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 6d85e398-6c75-4311-8e23-32d811e211f6 {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 2387.847516] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 98e42e37-e7d8-46f7-96c0-792f11f77c24 {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 2387.847703] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Triggering sync for uuid 959b5f5a-18d2-4dff-8a43-bfba04947822 {{(pid=67015) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 2387.848055] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "dea0f558-4d4b-41f4-9df9-c997835a628c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2387.848418] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "94751383-c885-4039-88b3-c1f6d3460e23" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2387.848595] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2387.848866] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 
None None] Acquiring lock "6d85e398-6c75-4311-8e23-32d811e211f6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2387.849252] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "98e42e37-e7d8-46f7-96c0-792f11f77c24" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2387.849371] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "959b5f5a-18d2-4dff-8a43-bfba04947822" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2388.095947] env[67015]: DEBUG nova.compute.manager [req-ea1fad59-a9ea-434e-9920-074feb963a4b req-df11653a-307c-4302-8f99-210b8122361a service nova] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] Received event network-vif-plugged-7402deab-e771-4d45-8e3c-60b45ffeab97 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 2388.096210] env[67015]: DEBUG oslo_concurrency.lockutils [req-ea1fad59-a9ea-434e-9920-074feb963a4b req-df11653a-307c-4302-8f99-210b8122361a service nova] Acquiring lock "959b5f5a-18d2-4dff-8a43-bfba04947822-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2388.096418] env[67015]: DEBUG oslo_concurrency.lockutils [req-ea1fad59-a9ea-434e-9920-074feb963a4b req-df11653a-307c-4302-8f99-210b8122361a service nova] Lock "959b5f5a-18d2-4dff-8a43-bfba04947822-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2388.096601] env[67015]: DEBUG oslo_concurrency.lockutils [req-ea1fad59-a9ea-434e-9920-074feb963a4b req-df11653a-307c-4302-8f99-210b8122361a service nova] Lock "959b5f5a-18d2-4dff-8a43-bfba04947822-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2388.096785] env[67015]: DEBUG nova.compute.manager [req-ea1fad59-a9ea-434e-9920-074feb963a4b req-df11653a-307c-4302-8f99-210b8122361a service nova] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] No waiting events found dispatching network-vif-plugged-7402deab-e771-4d45-8e3c-60b45ffeab97 {{(pid=67015) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2388.096957] env[67015]: WARNING nova.compute.manager [req-ea1fad59-a9ea-434e-9920-074feb963a4b req-df11653a-307c-4302-8f99-210b8122361a service nova] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] Received unexpected event network-vif-plugged-7402deab-e771-4d45-8e3c-60b45ffeab97 for instance with vm_state building and task_state spawning. 
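
The "No waiting events found dispatching network-vif-plugged-..." warning above is the dispatch side of Nova's external-event handshake with Neutron: the compute manager keeps a per-instance registry of events it expects, and an incoming notification either completes a registered waiter or is logged as unexpected (here the instance was still building, so no waiter had been registered yet). A simplified sketch of that registry pattern, not the actual InstanceEvents implementation:

    import threading

    class InstanceEvents:
        def __init__(self):
            self._events = {}  # {instance_uuid: {event_name: threading.Event}}
            self._lock = threading.Lock()  # plays the role of the "<uuid>-events" lock

        def prepare_for_event(self, uuid, name):
            # Called before the action that will trigger the event
            # (e.g. before asking Neutron to plug a VIF).
            with self._lock:
                ev = threading.Event()
                self._events.setdefault(uuid, {})[name] = ev
            return ev

        def pop_instance_event(self, uuid, name):
            # Called when the external notification arrives.
            with self._lock:
                ev = self._events.get(uuid, {}).pop(name, None)
            if ev is None:
                print('Received unexpected event %s' % name)  # nobody was waiting
            else:
                ev.set()  # wake the thread blocked in ev.wait(timeout)

A spawning thread would call prepare_for_event(), start the operation, then block in ev.wait(timeout); the notification handler calls pop_instance_event() to release it.
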
[ 2388.175139] env[67015]: DEBUG nova.network.neutron [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] Successfully updated port: 7402deab-e771-4d45-8e3c-60b45ffeab97 {{(pid=67015) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2388.187702] env[67015]: DEBUG oslo_concurrency.lockutils [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Acquiring lock "refresh_cache-959b5f5a-18d2-4dff-8a43-bfba04947822" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2388.188099] env[67015]: DEBUG oslo_concurrency.lockutils [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Acquired lock "refresh_cache-959b5f5a-18d2-4dff-8a43-bfba04947822" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2388.188099] env[67015]: DEBUG nova.network.neutron [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] Building network info cache for instance {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2388.233452] env[67015]: DEBUG nova.network.neutron [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] Instance cache missing network info. {{(pid=67015) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2388.395124] env[67015]: DEBUG nova.network.neutron [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] Updating instance_info_cache with network_info: [{"id": "7402deab-e771-4d45-8e3c-60b45ffeab97", "address": "fa:16:3e:90:e7:db", "network": {"id": "baa607c6-7e4e-4e72-9701-c7d84049131d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1804919477-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e31f10783f184cd182e4ee65e3f2fc05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7402deab-e7", "ovs_interfaceid": "7402deab-e771-4d45-8e3c-60b45ffeab97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2388.405447] env[67015]: DEBUG oslo_concurrency.lockutils [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 
tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Releasing lock "refresh_cache-959b5f5a-18d2-4dff-8a43-bfba04947822" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2388.405732] env[67015]: DEBUG nova.compute.manager [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] Instance network_info: |[{"id": "7402deab-e771-4d45-8e3c-60b45ffeab97", "address": "fa:16:3e:90:e7:db", "network": {"id": "baa607c6-7e4e-4e72-9701-c7d84049131d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1804919477-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e31f10783f184cd182e4ee65e3f2fc05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7402deab-e7", "ovs_interfaceid": "7402deab-e771-4d45-8e3c-60b45ffeab97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67015) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2388.406179] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:e7:db', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c492f5cc-7ae0-4cab-823c-0d5dd8c60b26', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7402deab-e771-4d45-8e3c-60b45ffeab97', 'vif_model': 'vmxnet3'}] {{(pid=67015) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2388.413555] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Creating folder: Project (e31f10783f184cd182e4ee65e3f2fc05). Parent ref: group-v623108. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2388.414068] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f25ddcf-af48-4ab8-becd-bf8de8e5d7d6 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2388.425013] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Created folder: Project (e31f10783f184cd182e4ee65e3f2fc05) in parent group-v623108. 
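
The Folder.CreateFolder and CreateVM_Task records in this stretch follow the standard oslo.vmware request cycle: invoke a vim method through the session ("Invoking ... with opID=..."), receive a Task managed object back, then block in wait_for_task(), whose polling loop produces the "progress is N%" lines that follow. A toy version of that polling loop, with a get_task_info callable standing in for the real PropertyCollector query (the names here are illustrative, not the oslo.vmware API itself):

    import time

    def wait_for_task(get_task_info, interval=0.5):
        # Poll until the task reaches a terminal state, mirroring
        # oslo_vmware.api's _poll_task: success returns the result, error
        # raises (which is how the VimFaultException with "fileType" /
        # InvalidArgument surfaces from CopyVirtualDisk_Task later on).
        while True:
            info = get_task_info()
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError(info['error'])
            print('progress is %s%%' % info.get('progress', 0))
            time.sleep(interval)

    # Scripted states reproducing the CreateVM_Task trace below:
    # 0% -> 25% -> completed successfully.
    states = iter([{'state': 'running', 'progress': 0},
                   {'state': 'running', 'progress': 25},
                   {'state': 'success', 'result': 'vm-123'}])
    print(wait_for_task(lambda: next(states), interval=0))
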
[ 2388.425221] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Creating folder: Instances. Parent ref: group-v623237. {{(pid=67015) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2388.425438] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bbfb2048-0184-4905-b5e8-b3800481dab8 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2388.434243] env[67015]: INFO nova.virt.vmwareapi.vm_util [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Created folder: Instances in parent group-v623237. [ 2388.434459] env[67015]: DEBUG oslo.service.loopingcall [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2388.434637] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] Creating VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2388.434811] env[67015]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb184016-7f66-4cf4-b592-be749dd31317 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2388.452639] env[67015]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2388.452639] env[67015]: value = "task-3114613" [ 2388.452639] env[67015]: _type = "Task" [ 2388.452639] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2388.462966] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114613, 'name': CreateVM_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2388.962600] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114613, 'name': CreateVM_Task} progress is 25%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2389.463861] env[67015]: DEBUG oslo_vmware.api [-] Task: {'id': task-3114613, 'name': CreateVM_Task, 'duration_secs': 0.667309} completed successfully. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2389.465019] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] Created VM on the ESX host {{(pid=67015) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2389.465101] env[67015]: DEBUG oslo_concurrency.lockutils [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2389.465275] env[67015]: DEBUG oslo_concurrency.lockutils [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2389.465590] env[67015]: DEBUG oslo_concurrency.lockutils [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2389.465839] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-444dbd83-c8ed-4847-abbf-b48b3d7dde0e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.470651] env[67015]: DEBUG oslo_vmware.api [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Waiting for the task: (returnval){ [ 2389.470651] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]526de5f9-b631-74f7-4f87-a06e1525f32f" [ 2389.470651] env[67015]: _type = "Task" [ 2389.470651] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2389.477839] env[67015]: DEBUG oslo_vmware.api [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]526de5f9-b631-74f7-4f87-a06e1525f32f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2389.981159] env[67015]: DEBUG oslo_concurrency.lockutils [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2389.981429] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] Processing image 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2389.981645] env[67015]: DEBUG oslo_concurrency.lockutils [None req-955c4ea3-f7c6-4b69-9e94-90b03e1a3ba9 tempest-AttachVolumeTestJSON-57900212 tempest-AttachVolumeTestJSON-57900212-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2390.120686] env[67015]: DEBUG nova.compute.manager [req-8b6e1a62-fd86-4b4b-bb6b-5c80368e4a2d req-5b84f052-ab5b-43e5-92ae-443d66b6f23c service nova] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] Received event network-changed-7402deab-e771-4d45-8e3c-60b45ffeab97 {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 2390.120851] env[67015]: DEBUG nova.compute.manager [req-8b6e1a62-fd86-4b4b-bb6b-5c80368e4a2d req-5b84f052-ab5b-43e5-92ae-443d66b6f23c service nova] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] Refreshing instance network info cache due to event network-changed-7402deab-e771-4d45-8e3c-60b45ffeab97. {{(pid=67015) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 2390.121083] env[67015]: DEBUG oslo_concurrency.lockutils [req-8b6e1a62-fd86-4b4b-bb6b-5c80368e4a2d req-5b84f052-ab5b-43e5-92ae-443d66b6f23c service nova] Acquiring lock "refresh_cache-959b5f5a-18d2-4dff-8a43-bfba04947822" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2390.121230] env[67015]: DEBUG oslo_concurrency.lockutils [req-8b6e1a62-fd86-4b4b-bb6b-5c80368e4a2d req-5b84f052-ab5b-43e5-92ae-443d66b6f23c service nova] Acquired lock "refresh_cache-959b5f5a-18d2-4dff-8a43-bfba04947822" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2390.121391] env[67015]: DEBUG nova.network.neutron [req-8b6e1a62-fd86-4b4b-bb6b-5c80368e4a2d req-5b84f052-ab5b-43e5-92ae-443d66b6f23c service nova] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] Refreshing network info cache for port 7402deab-e771-4d45-8e3c-60b45ffeab97 {{(pid=67015) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2390.360527] env[67015]: DEBUG nova.network.neutron [req-8b6e1a62-fd86-4b4b-bb6b-5c80368e4a2d req-5b84f052-ab5b-43e5-92ae-443d66b6f23c service nova] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] Updated VIF entry in instance network info cache for port 7402deab-e771-4d45-8e3c-60b45ffeab97. 
{{(pid=67015) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2390.360875] env[67015]: DEBUG nova.network.neutron [req-8b6e1a62-fd86-4b4b-bb6b-5c80368e4a2d req-5b84f052-ab5b-43e5-92ae-443d66b6f23c service nova] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] Updating instance_info_cache with network_info: [{"id": "7402deab-e771-4d45-8e3c-60b45ffeab97", "address": "fa:16:3e:90:e7:db", "network": {"id": "baa607c6-7e4e-4e72-9701-c7d84049131d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1804919477-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e31f10783f184cd182e4ee65e3f2fc05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7402deab-e7", "ovs_interfaceid": "7402deab-e771-4d45-8e3c-60b45ffeab97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2390.370371] env[67015]: DEBUG oslo_concurrency.lockutils [req-8b6e1a62-fd86-4b4b-bb6b-5c80368e4a2d req-5b84f052-ab5b-43e5-92ae-443d66b6f23c service nova] Releasing lock "refresh_cache-959b5f5a-18d2-4dff-8a43-bfba04947822" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2401.551274] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2404.515699] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2404.516115] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2407.514308] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2407.528040] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2407.528298] env[67015]: DEBUG oslo_concurrency.lockutils [None 
req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2407.528501] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2407.528860] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67015) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2407.531436] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a5ffc09-9169-44a8-abfe-57d81fba32da {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.539813] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40137750-a322-4fca-8527-a77e4dbca2be {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.553529] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d81b6845-0292-40dc-92d4-160a52fab074 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.560204] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e9371f-e0f0-4e3a-8ce2-dbe596140022 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.590278] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181040MB free_disk=135GB free_vcpus=48 pci_devices=None {{(pid=67015) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2407.590838] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2407.590838] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2407.656844] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance dea0f558-4d4b-41f4-9df9-c997835a628c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2407.657018] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 94751383-c885-4039-88b3-c1f6d3460e23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2407.657155] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2407.657281] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 6d85e398-6c75-4311-8e23-32d811e211f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2407.657430] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 98e42e37-e7d8-46f7-96c0-792f11f77c24 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2407.657614] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Instance 959b5f5a-18d2-4dff-8a43-bfba04947822 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67015) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2407.657815] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2407.657971] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=67015) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2407.748112] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8253fee1-11fd-4ee9-85b7-b91a230add54 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.755675] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a20cb2ac-3a46-4254-b8c9-ef63f0c0263b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.787261] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5537888-431b-4c17-a617-7ad32a15b8ce {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.794646] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc205097-d00e-4260-b2ef-779b90cb823c {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.807564] env[67015]: DEBUG nova.compute.provider_tree [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2407.815987] env[67015]: DEBUG nova.scheduler.client.report [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2407.829736] env[67015]: DEBUG nova.compute.resource_tracker [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67015) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2407.829908] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.239s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2410.825887] env[67015]: DEBUG oslo_service.periodic_task [None 
req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2411.268377] env[67015]: WARNING oslo_vmware.rw_handles [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2411.268377] env[67015]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2411.268377] env[67015]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2411.268377] env[67015]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2411.268377] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2411.268377] env[67015]: ERROR oslo_vmware.rw_handles response.begin() [ 2411.268377] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2411.268377] env[67015]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2411.268377] env[67015]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2411.268377] env[67015]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2411.268377] env[67015]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2411.268377] env[67015]: ERROR oslo_vmware.rw_handles [ 2411.268836] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Downloaded image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to vmware_temp/9a91d9c0-16df-4df6-b978-37a485690c4f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2411.270962] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Caching image {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2411.271353] env[67015]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Copying Virtual Disk [datastore2] vmware_temp/9a91d9c0-16df-4df6-b978-37a485690c4f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk to [datastore2] vmware_temp/9a91d9c0-16df-4df6-b978-37a485690c4f/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk {{(pid=67015) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2411.271650] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba72ca22-a105-4fcf-874e-b4fbc6ceaefb {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2411.282366] env[67015]: DEBUG oslo_vmware.api [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Waiting for the task: (returnval){ [ 2411.282366] env[67015]: value = "task-3114614" [ 2411.282366] env[67015]: _type = "Task" [ 2411.282366] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2411.296774] env[67015]: DEBUG oslo_vmware.api [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Task: {'id': task-3114614, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2411.517370] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2411.792760] env[67015]: DEBUG oslo_vmware.exceptions [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Fault InvalidArgument not matched. {{(pid=67015) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2411.793081] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2411.793664] env[67015]: ERROR nova.compute.manager [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2411.793664] env[67015]: Faults: ['InvalidArgument'] [ 2411.793664] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Traceback (most recent call last): [ 2411.793664] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2411.793664] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] yield resources [ 2411.793664] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2411.793664] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] self.driver.spawn(context, instance, image_meta, [ 2411.793664] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2411.793664] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] self._vmops.spawn(context, instance, image_meta, 
injected_files, [ 2411.793664] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2411.793664] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] self._fetch_image_if_missing(context, vi) [ 2411.793664] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2411.794033] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] image_cache(vi, tmp_image_ds_loc) [ 2411.794033] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2411.794033] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] vm_util.copy_virtual_disk( [ 2411.794033] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2411.794033] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] session._wait_for_task(vmdk_copy_task) [ 2411.794033] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2411.794033] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] return self.wait_for_task(task_ref) [ 2411.794033] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2411.794033] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] return evt.wait() [ 2411.794033] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2411.794033] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] result = hub.switch() [ 2411.794033] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2411.794033] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] return self.greenlet.switch() [ 2411.794351] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2411.794351] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] self.f(*self.args, **self.kw) [ 2411.794351] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2411.794351] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] raise exceptions.translate_fault(task_info.error) [ 2411.794351] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2411.794351] env[67015]: 
ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Faults: ['InvalidArgument'] [ 2411.794351] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] [ 2411.794351] env[67015]: INFO nova.compute.manager [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Terminating instance [ 2411.796926] env[67015]: DEBUG nova.compute.manager [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2411.797180] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2411.797463] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982.vmdk" {{(pid=67015) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2411.797655] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2411.798426] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa1dd00-a136-4820-a621-824b529da866 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.803018] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a5d8969a-2144-4be4-82a7-6e11427100c2 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.812418] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Unregistering the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2411.816025] env[67015]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b5e7919d-60a1-49fc-b366-5a72527ba34a {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.816025] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2411.816025] env[67015]: DEBUG 
nova.virt.vmwareapi.vmops [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67015) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2411.816464] env[67015]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c768ba7-646b-4fd4-a0c8-c4a588ae9bcc {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.822182] env[67015]: DEBUG oslo_vmware.api [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Waiting for the task: (returnval){ [ 2411.822182] env[67015]: value = "session[52d1ddb0-379a-467e-f35d-8689794924b2]527822c2-dfda-8dd8-3052-d13499a8907b" [ 2411.822182] env[67015]: _type = "Task" [ 2411.822182] env[67015]: } to complete. {{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2411.829531] env[67015]: DEBUG oslo_vmware.api [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Task: {'id': session[52d1ddb0-379a-467e-f35d-8689794924b2]527822c2-dfda-8dd8-3052-d13499a8907b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2411.881839] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Unregistered the VM {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2411.885013] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Deleting contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2411.885013] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Deleting the datastore file [datastore2] dea0f558-4d4b-41f4-9df9-c997835a628c {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2411.885013] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4ed30475-9821-4331-abf4-d0fccd801a94 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.893146] env[67015]: DEBUG oslo_vmware.api [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Waiting for the task: (returnval){ [ 2411.893146] env[67015]: value = "task-3114616" [ 2411.893146] env[67015]: _type = "Task" [ 2411.893146] env[67015]: } to complete. 
{{(pid=67015) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2411.901334] env[67015]: DEBUG oslo_vmware.api [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Task: {'id': task-3114616, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2412.335180] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Preparing fetch location {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2412.335180] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Creating directory with path [datastore2] vmware_temp/b4b3bed2-59d6-4146-8673-0f54a1a2b8e0/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2412.335180] env[67015]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d62e6dd5-aa85-4a72-87e4-b209a7024ca1 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.349402] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Created directory with path [datastore2] vmware_temp/b4b3bed2-59d6-4146-8673-0f54a1a2b8e0/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 {{(pid=67015) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2412.349402] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Fetch image to [datastore2] vmware_temp/b4b3bed2-59d6-4146-8673-0f54a1a2b8e0/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk {{(pid=67015) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2412.349402] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to [datastore2] vmware_temp/b4b3bed2-59d6-4146-8673-0f54a1a2b8e0/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk on the data store datastore2 {{(pid=67015) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2412.349402] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d92923-d31a-45b8-b5b3-5ced6d857030 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.356653] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91bbf27b-d66f-4ee0-afa3-44c64e71d4ec {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.367467] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d62dff8d-ae26-4ee5-ae57-4a11317be17b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.404637] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a7fada-f992-4022-ad1a-5b85ed40b015 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.415756] env[67015]: DEBUG oslo_vmware.api [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Task: {'id': task-3114616, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077993} completed successfully. {{(pid=67015) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2412.416358] env[67015]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Deleted the datastore file {{(pid=67015) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2412.416578] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Deleted contents of the VM from datastore datastore2 {{(pid=67015) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2412.416793] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2412.416999] env[67015]: INFO nova.compute.manager [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Took 0.62 seconds to destroy the instance on the hypervisor. 
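[Editor's note: the "Waiting for the task: (returnval){ ... }" and "progress is 0%" entries in this section come from oslo_vmware's task polling (wait_for_task/_poll_task in oslo_vmware/api.py, per the {{...}} source markers). A minimal sketch of that poll-until-terminal pattern follows; get_task_info, the info-dict layout, and VimError are illustrative stand-ins for this note, not the real oslo.vmware API.]

    import time

    class VimError(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def wait_for_task(get_task_info, task_ref, interval=0.5):
        # Poll the vCenter task until it reaches a terminal state,
        # mirroring the "progress is N%" entries in the log above.
        while True:
            info = get_task_info(task_ref)  # hypothetical callable -> dict
            if info['state'] == 'running':
                print("Task: %s progress is %s%%"
                      % (task_ref, info.get('progress', 0)))
            elif info['state'] == 'success':
                return info.get('result')
            elif info['state'] == 'error':
                # Mirrors: raise exceptions.translate_fault(task_info.error),
                # which surfaces above as "A specified parameter was not
                # correct: fileType" with Faults: ['InvalidArgument'].
                raise VimError(info['error'])
            time.sleep(interval)

[On error the translated fault propagates out of the driver's spawn(), which is exactly the path the CopyVirtualDisk_Task failure takes in the traceback above.]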
[ 2412.419021] env[67015]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2a60b4b5-0eb4-4631-bed6-c20e87d76cec {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.421232] env[67015]: DEBUG nova.compute.claims [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Aborting claim: {{(pid=67015) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2412.421449] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2412.421697] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2412.446667] env[67015]: DEBUG nova.virt.vmwareapi.images [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Downloading image file data 8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982 to the data store datastore2 {{(pid=67015) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2412.515875] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2412.601966] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8054d2cb-8922-4580-95a2-4e0a14c4331b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.606052] env[67015]: DEBUG oslo_vmware.rw_handles [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b4b3bed2-59d6-4146-8673-0f54a1a2b8e0/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67015) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2412.610820] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce16120-6922-490c-ada0-99bc1a51e2c1 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.699571] env[67015]: DEBUG oslo_vmware.rw_handles [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Completed reading data from the image iterator. {{(pid=67015) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2412.699767] env[67015]: DEBUG oslo_vmware.rw_handles [None req-ea5fdfe5-3c2f-4e6a-ac16-342e62c2688b tempest-ServersTestJSON-767920299 tempest-ServersTestJSON-767920299-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b4b3bed2-59d6-4146-8673-0f54a1a2b8e0/8cdbc1be-eeab-4ebb-a816-4a6cfe6e5982/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67015) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2412.700644] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da6ba490-e759-4d45-a250-aede7bb40048 {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.708315] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c587fb44-956e-42f9-851f-46c3911305ad {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.722452] env[67015]: DEBUG nova.compute.provider_tree [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Inventory has not changed in ProviderTree for provider: 82311841-8ff3-4f49-9053-67c5a45ef771 {{(pid=67015) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2412.731311] env[67015]: DEBUG nova.scheduler.client.report [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Inventory has not changed for provider 82311841-8ff3-4f49-9053-67c5a45ef771 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 135, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67015) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2412.745727] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.324s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2412.746306] env[67015]: ERROR nova.compute.manager [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e 
tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2412.746306] env[67015]: Faults: ['InvalidArgument'] [ 2412.746306] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Traceback (most recent call last): [ 2412.746306] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2412.746306] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] self.driver.spawn(context, instance, image_meta, [ 2412.746306] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2412.746306] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2412.746306] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2412.746306] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] self._fetch_image_if_missing(context, vi) [ 2412.746306] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2412.746306] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] image_cache(vi, tmp_image_ds_loc) [ 2412.746306] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2412.746753] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] vm_util.copy_virtual_disk( [ 2412.746753] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2412.746753] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] session._wait_for_task(vmdk_copy_task) [ 2412.746753] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2412.746753] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] return self.wait_for_task(task_ref) [ 2412.746753] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2412.746753] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] return evt.wait() [ 2412.746753] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2412.746753] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] result = hub.switch() [ 2412.746753] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2412.746753] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] return self.greenlet.switch() [ 2412.746753] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2412.746753] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] self.f(*self.args, **self.kw) [ 2412.747139] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2412.747139] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] raise exceptions.translate_fault(task_info.error) [ 2412.747139] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2412.747139] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Faults: ['InvalidArgument'] [ 2412.747139] env[67015]: ERROR nova.compute.manager [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] [ 2412.747139] env[67015]: DEBUG nova.compute.utils [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] VimFaultException {{(pid=67015) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2412.748476] env[67015]: DEBUG nova.compute.manager [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Build of instance dea0f558-4d4b-41f4-9df9-c997835a628c was re-scheduled: A specified parameter was not correct: fileType [ 2412.748476] env[67015]: Faults: ['InvalidArgument'] {{(pid=67015) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2412.748843] env[67015]: DEBUG nova.compute.manager [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Unplugging VIFs for instance {{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2412.749027] env[67015]: DEBUG nova.compute.manager [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67015) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2412.749260] env[67015]: DEBUG nova.compute.manager [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2412.749438] env[67015]: DEBUG nova.network.neutron [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2413.096173] env[67015]: DEBUG nova.network.neutron [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2413.111480] env[67015]: INFO nova.compute.manager [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Took 0.36 seconds to deallocate network for instance. [ 2413.229546] env[67015]: INFO nova.scheduler.client.report [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Deleted allocations for instance dea0f558-4d4b-41f4-9df9-c997835a628c [ 2413.249760] env[67015]: DEBUG oslo_concurrency.lockutils [None req-e7b5869c-9b75-406c-80b0-5c2d3259d82e tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Lock "dea0f558-4d4b-41f4-9df9-c997835a628c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 459.303s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2413.250014] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9b7c153a-c52a-4296-8812-a28193613119 tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Lock "dea0f558-4d4b-41f4-9df9-c997835a628c" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 264.116s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2413.250472] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9b7c153a-c52a-4296-8812-a28193613119 tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Acquiring lock "dea0f558-4d4b-41f4-9df9-c997835a628c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2413.250472] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9b7c153a-c52a-4296-8812-a28193613119 tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] 
Lock "dea0f558-4d4b-41f4-9df9-c997835a628c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2413.250762] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9b7c153a-c52a-4296-8812-a28193613119 tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Lock "dea0f558-4d4b-41f4-9df9-c997835a628c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2413.252643] env[67015]: INFO nova.compute.manager [None req-9b7c153a-c52a-4296-8812-a28193613119 tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Terminating instance [ 2413.254302] env[67015]: DEBUG nova.compute.manager [None req-9b7c153a-c52a-4296-8812-a28193613119 tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Start destroying the instance on the hypervisor. {{(pid=67015) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2413.254496] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-9b7c153a-c52a-4296-8812-a28193613119 tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Destroying instance {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2413.254959] env[67015]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-16a4d213-0b5e-41b4-87a0-87fb62b2a18b {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2413.264307] env[67015]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e4961f7-ee60-447a-8a27-31cebd8c419e {{(pid=67015) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2413.291173] env[67015]: WARNING nova.virt.vmwareapi.vmops [None req-9b7c153a-c52a-4296-8812-a28193613119 tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance dea0f558-4d4b-41f4-9df9-c997835a628c could not be found. [ 2413.291433] env[67015]: DEBUG nova.virt.vmwareapi.vmops [None req-9b7c153a-c52a-4296-8812-a28193613119 tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Instance destroyed {{(pid=67015) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2413.291623] env[67015]: INFO nova.compute.manager [None req-9b7c153a-c52a-4296-8812-a28193613119 tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2413.291861] env[67015]: DEBUG oslo.service.loopingcall [None req-9b7c153a-c52a-4296-8812-a28193613119 tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67015) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2413.292093] env[67015]: DEBUG nova.compute.manager [-] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Deallocating network for instance {{(pid=67015) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2413.292194] env[67015]: DEBUG nova.network.neutron [-] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] deallocate_for_instance() {{(pid=67015) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2413.314976] env[67015]: DEBUG nova.network.neutron [-] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Updating instance_info_cache with network_info: [] {{(pid=67015) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2413.327203] env[67015]: INFO nova.compute.manager [-] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] Took 0.03 seconds to deallocate network for instance. [ 2413.426012] env[67015]: DEBUG oslo_concurrency.lockutils [None req-9b7c153a-c52a-4296-8812-a28193613119 tempest-ServerAddressesNegativeTestJSON-127762532 tempest-ServerAddressesNegativeTestJSON-127762532-project-member] Lock "dea0f558-4d4b-41f4-9df9-c997835a628c" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.176s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2413.426815] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "dea0f558-4d4b-41f4-9df9-c997835a628c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 25.579s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2413.427039] env[67015]: INFO nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: dea0f558-4d4b-41f4-9df9-c997835a628c] During sync_power_state the instance has a pending task (deleting). Skip. 
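[Editor's note: "During sync_power_state the instance has a pending task (deleting). Skip." shows the periodic power-state sync taking the per-instance lock and bailing out while a task is in flight, so it cannot fight the delete path it just waited 25.579s behind. A rough sketch of that guard, assuming plain dicts and threading locks in place of Nova's instance objects and oslo_concurrency.lockutils:]

    import threading
    from collections import defaultdict

    # Hypothetical per-instance locks keyed by UUID, standing in for
    # lockutils locks named after the instance.
    _locks = defaultdict(threading.Lock)

    def query_driver_power_state_and_sync(instance, driver_power_state):
        with _locks[instance['uuid']]:
            if instance.get('task_state') is not None:
                # Another code path (here: deleting) owns the instance
                # right now, so the periodic sync skips it.
                print("During sync_power_state the instance has a pending "
                      "task (%s). Skip." % instance['task_state'])
                return
            # Otherwise reconcile the recorded state with the driver's.
            instance['power_state'] = driver_power_state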
[ 2413.427257] env[67015]: DEBUG oslo_concurrency.lockutils [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Lock "dea0f558-4d4b-41f4-9df9-c997835a628c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=67015) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2415.514370] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2415.514752] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Starting heal instance info cache {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 2415.514752] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Rebuilding the list of instances to heal {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2415.532288] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 94751383-c885-4039-88b3-c1f6d3460e23] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2415.532453] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6f1d5a4e-c6f2-4d9b-9649-22b28b171bb5] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2415.532571] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 6d85e398-6c75-4311-8e23-32d811e211f6] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2415.532701] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 98e42e37-e7d8-46f7-96c0-792f11f77c24] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2415.532828] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] [instance: 959b5f5a-18d2-4dff-8a43-bfba04947822] Skipping network cache update for instance because it is Building. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 2415.532953] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Didn't find any instances for network info cache update. {{(pid=67015) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 2416.514423] env[67015]: DEBUG oslo_service.periodic_task [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67015) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2416.514702] env[67015]: DEBUG nova.compute.manager [None req-ee4c236f-b99e-43fb-a134-d98435309811 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67015) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}}
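[Editor's note: the section closes with the periodic-task machinery idling: _heal_instance_info_cache finds only Building instances to skip, and _reclaim_queued_deletes returns immediately because CONF.reclaim_instance_interval <= 0, i.e. deferred (soft) delete is disabled. A minimal sketch of that guard; Conf here is a hypothetical stand-in for the oslo.config object:]

    import logging

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)

    class Conf:
        # 0 disables deferred delete; a positive value is how many
        # seconds a soft-deleted instance lingers before reclaim.
        reclaim_instance_interval = 0

    CONF = Conf()

    def _reclaim_queued_deletes(context):
        if CONF.reclaim_instance_interval <= 0:
            LOG.debug("CONF.reclaim_instance_interval <= 0, skipping...")
            return
        # Otherwise: look up SOFT_DELETED instances older than the
        # interval and really delete them (elided in this sketch).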